From a2d8f803bd9c7901c825ed450da2a6f905b1f3ac Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 31 Oct 2024 01:00:00 +0000 Subject: [PATCH] chore: Update gapic-generator-python to v1.20.2 PiperOrigin-RevId: 691540410 Source-Link: https://github.com/googleapis/googleapis/commit/b43cfb18b7f23641d1254188a9cc2b3515895d98 Source-Link: https://github.com/googleapis/googleapis-gen/commit/40fd27b08abb2e8b8a84b57941e1226971f37a97 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRtcy8uT3dsQm90LnlhbWwiLCJoIjoiNDBmZDI3YjA4YWJiMmU4YjhhODRiNTc5NDFlMTIyNjk3MWYzN2E5NyJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXF1b3Rhcy8uT3dsQm90LnlhbWwiLCJoIjoiNDBmZDI3YjA4YWJiMmU4YjhhODRiNTc5NDFlMTIyNjk3MWYzN2E5NyJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtY29udHJvbC8uT3dsQm90LnlhbWwiLCJoIjoiNDBmZDI3YjA4YWJiMmU4YjhhODRiNTc5NDFlMTIyNjk3MWYzN2E5NyJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtbWFuYWdlbWVudC8uT3dsQm90LnlhbWwiLCJoIjoiNDBmZDI3YjA4YWJiMmU4YjhhODRiNTc5NDFlMTIyNjk3MWYzN2E5NyJ9 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXNlcnZpY2UtdXNhZ2UvLk93bEJvdC55YW1sIiwiaCI6IjQwZmQyN2IwOGFiYjJlOGI4YTg0YjU3OTQxZTEyMjY5NzFmMzdhOTcifQ== --- .../google-cloud-dms/v1/.coveragerc | 13 + owl-bot-staging/google-cloud-dms/v1/.flake8 | 33 + .../google-cloud-dms/v1/MANIFEST.in | 2 + .../google-cloud-dms/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../clouddms_v1/data_migration_service.rst | 10 + .../v1/docs/clouddms_v1/services_.rst | 6 + .../v1/docs/clouddms_v1/types_.rst | 6 + .../google-cloud-dms/v1/docs/conf.py | 376 + .../google-cloud-dms/v1/docs/index.rst | 7 + .../v1/google/cloud/clouddms/__init__.py | 297 + .../v1/google/cloud/clouddms/gapic_version.py | 16 + .../v1/google/cloud/clouddms/py.typed | 2 + .../v1/google/cloud/clouddms_v1/__init__.py | 298 + .../cloud/clouddms_v1/gapic_metadata.json | 433 + .../google/cloud/clouddms_v1/gapic_version.py | 16 + .../v1/google/cloud/clouddms_v1/py.typed | 2 + .../cloud/clouddms_v1/services/__init__.py | 15 + 
.../data_migration_service/__init__.py | 22 + .../data_migration_service/async_client.py | 5479 +++++ .../services/data_migration_service/client.py | 5823 +++++ .../services/data_migration_service/pagers.py | 974 + .../transports/README.rst | 9 + .../transports/__init__.py | 33 + .../data_migration_service/transports/base.py | 854 + .../data_migration_service/transports/grpc.py | 1569 ++ .../transports/grpc_asyncio.py | 1835 ++ .../cloud/clouddms_v1/types/__init__.py | 294 + .../cloud/clouddms_v1/types/clouddms.py | 2053 ++ .../clouddms_v1/types/clouddms_resources.py | 2119 ++ .../types/conversionworkspace_resources.py | 2719 +++ owl-bot-staging/google-cloud-dms/v1/mypy.ini | 3 + .../google-cloud-dms/v1/noxfile.py | 280 + ...ervice_apply_conversion_workspace_async.py | 57 + ...service_apply_conversion_workspace_sync.py | 57 + ...rvice_commit_conversion_workspace_async.py | 56 + ...ervice_commit_conversion_workspace_sync.py | 56 + ...vice_convert_conversion_workspace_async.py | 55 + ...rvice_convert_conversion_workspace_sync.py | 55 + ...service_create_connection_profile_async.py | 64 + ..._service_create_connection_profile_sync.py | 64 + ...rvice_create_conversion_workspace_async.py | 64 + ...ervice_create_conversion_workspace_sync.py | 64 + ...ation_service_create_mapping_rule_async.py | 59 + ...ration_service_create_mapping_rule_sync.py | 59 + ...tion_service_create_migration_job_async.py | 65 + ...ation_service_create_migration_job_sync.py | 65 + ...service_create_private_connection_async.py | 62 + ..._service_create_private_connection_sync.py | 62 + ...service_delete_connection_profile_async.py | 56 + ..._service_delete_connection_profile_sync.py | 56 + ...rvice_delete_conversion_workspace_async.py | 56 + ...ervice_delete_conversion_workspace_sync.py | 56 + ...ation_service_delete_mapping_rule_async.py | 50 + ...ration_service_delete_mapping_rule_sync.py | 50 + ...tion_service_delete_migration_job_async.py | 56 + ...ation_service_delete_migration_job_sync.py | 
56 + ...service_delete_private_connection_async.py | 56 + ..._service_delete_private_connection_sync.py | 56 + ...be_conversion_workspace_revisions_async.py | 52 + ...ibe_conversion_workspace_revisions_sync.py | 52 + ...ervice_describe_database_entities_async.py | 54 + ...service_describe_database_entities_sync.py | 54 + ...igration_service_fetch_static_ips_async.py | 53 + ...migration_service_fetch_static_ips_sync.py | 53 + ...ation_service_generate_ssh_script_async.py | 56 + ...ration_service_generate_ssh_script_sync.py | 56 + ...service_generate_tcp_proxy_script_async.py | 54 + ..._service_generate_tcp_proxy_script_sync.py | 54 + ...on_service_get_connection_profile_async.py | 52 + ...ion_service_get_connection_profile_sync.py | 52 + ..._service_get_conversion_workspace_async.py | 52 + ...n_service_get_conversion_workspace_sync.py | 52 + ...igration_service_get_mapping_rule_async.py | 52 + ...migration_service_get_mapping_rule_sync.py | 52 + ...gration_service_get_migration_job_async.py | 52 + ...igration_service_get_migration_job_sync.py | 52 + ...on_service_get_private_connection_async.py | 52 + ...ion_service_get_private_connection_sync.py | 52 + ...tion_service_import_mapping_rules_async.py | 63 + ...ation_service_import_mapping_rules_sync.py | 63 + ..._service_list_connection_profiles_async.py | 53 + ...n_service_list_connection_profiles_sync.py | 53 + ...ervice_list_conversion_workspaces_async.py | 53 + ...service_list_conversion_workspaces_sync.py | 53 + ...ration_service_list_mapping_rules_async.py | 53 + ...gration_service_list_mapping_rules_sync.py | 53 + ...ation_service_list_migration_jobs_async.py | 53 + ...ration_service_list_migration_jobs_sync.py | 53 + ..._service_list_private_connections_async.py | 53 + ...n_service_list_private_connections_sync.py | 53 + ...ion_service_promote_migration_job_async.py | 55 + ...tion_service_promote_migration_job_sync.py | 55 + ...ion_service_restart_migration_job_async.py | 55 + 
...tion_service_restart_migration_job_sync.py | 55 + ...tion_service_resume_migration_job_async.py | 55 + ...ation_service_resume_migration_job_sync.py | 55 + ...ice_rollback_conversion_workspace_async.py | 56 + ...vice_rollback_conversion_workspace_sync.py | 56 + ...on_service_search_background_jobs_async.py | 52 + ...ion_service_search_background_jobs_sync.py | 52 + ...service_seed_conversion_workspace_async.py | 56 + ..._service_seed_conversion_workspace_sync.py | 56 + ...ation_service_start_migration_job_async.py | 55 + ...ration_service_start_migration_job_sync.py | 55 + ...ration_service_stop_migration_job_async.py | 55 + ...gration_service_stop_migration_job_sync.py | 55 + ...service_update_connection_profile_async.py | 62 + ..._service_update_connection_profile_sync.py | 62 + ...rvice_update_conversion_workspace_async.py | 62 + ...ervice_update_conversion_workspace_sync.py | 62 + ...tion_service_update_migration_job_async.py | 63 + ...ation_service_update_migration_job_sync.py | 63 + ...tion_service_verify_migration_job_async.py | 55 + ...ation_service_verify_migration_job_sync.py | 55 + ...pet_metadata_google.cloud.clouddms.v1.json | 6578 ++++++ .../v1/scripts/fixup_clouddms_v1_keywords.py | 216 + owl-bot-staging/google-cloud-dms/v1/setup.py | 99 + .../v1/testing/constraints-3.10.txt | 7 + .../v1/testing/constraints-3.11.txt | 7 + .../v1/testing/constraints-3.12.txt | 7 + .../v1/testing/constraints-3.13.txt | 7 + .../v1/testing/constraints-3.7.txt | 11 + .../v1/testing/constraints-3.8.txt | 7 + .../v1/testing/constraints-3.9.txt | 7 + .../google-cloud-dms/v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/clouddms_v1/__init__.py | 16 + .../test_data_migration_service.py | 18023 ++++++++++++++++ .../google-cloud-quotas/v1/.coveragerc | 13 + .../google-cloud-quotas/v1/.flake8 | 33 + .../google-cloud-quotas/v1/MANIFEST.in | 2 + .../google-cloud-quotas/v1/README.rst | 49 + 
.../v1/docs/_static/custom.css | 3 + .../v1/docs/cloudquotas_v1/cloud_quotas.rst | 10 + .../v1/docs/cloudquotas_v1/services_.rst | 6 + .../v1/docs/cloudquotas_v1/types_.rst | 6 + .../google-cloud-quotas/v1/docs/conf.py | 376 + .../google-cloud-quotas/v1/docs/index.rst | 7 + .../v1/google/cloud/cloudquotas/__init__.py | 59 + .../google/cloud/cloudquotas/gapic_version.py | 16 + .../v1/google/cloud/cloudquotas/py.typed | 2 + .../google/cloud/cloudquotas_v1/__init__.py | 60 + .../cloud/cloudquotas_v1/gapic_metadata.json | 118 + .../cloud/cloudquotas_v1/gapic_version.py | 16 + .../v1/google/cloud/cloudquotas_v1/py.typed | 2 + .../cloud/cloudquotas_v1/services/__init__.py | 15 + .../services/cloud_quotas/__init__.py | 22 + .../services/cloud_quotas/async_client.py | 981 + .../services/cloud_quotas/client.py | 1314 ++ .../services/cloud_quotas/pagers.py | 298 + .../cloud_quotas/transports/README.rst | 9 + .../cloud_quotas/transports/__init__.py | 38 + .../services/cloud_quotas/transports/base.py | 279 + .../services/cloud_quotas/transports/grpc.py | 416 + .../cloud_quotas/transports/grpc_asyncio.py | 516 + .../services/cloud_quotas/transports/rest.py | 837 + .../cloud_quotas/transports/rest_base.py | 386 + .../cloud/cloudquotas_v1/types/__init__.py | 54 + .../cloud/cloudquotas_v1/types/cloudquotas.py | 322 + .../cloud/cloudquotas_v1/types/resources.py | 525 + .../google-cloud-quotas/v1/mypy.ini | 3 + .../google-cloud-quotas/v1/noxfile.py | 280 + ...ud_quotas_create_quota_preference_async.py | 58 + ...oud_quotas_create_quota_preference_sync.py | 58 + ...rated_cloud_quotas_get_quota_info_async.py | 52 + ...erated_cloud_quotas_get_quota_info_sync.py | 52 + ...cloud_quotas_get_quota_preference_async.py | 52 + ..._cloud_quotas_get_quota_preference_sync.py | 52 + ...ted_cloud_quotas_list_quota_infos_async.py | 53 + ...ated_cloud_quotas_list_quota_infos_sync.py | 53 + ...oud_quotas_list_quota_preferences_async.py | 53 + ...loud_quotas_list_quota_preferences_sync.py | 53 + 
...ud_quotas_update_quota_preference_async.py | 57 + ...oud_quotas_update_quota_preference_sync.py | 57 + ...et_metadata_google.api.cloudquotas.v1.json | 1005 + .../scripts/fixup_cloudquotas_v1_keywords.py | 181 + .../google-cloud-quotas/v1/setup.py | 98 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.13.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../google-cloud-quotas/v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/cloudquotas_v1/__init__.py | 16 + .../gapic/cloudquotas_v1/test_cloud_quotas.py | 6270 ++++++ .../v1/.coveragerc | 13 + .../google-cloud-service-control/v1/.flake8 | 33 + .../v1/MANIFEST.in | 2 + .../v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../v1/docs/conf.py | 376 + .../v1/docs/index.rst | 7 + .../servicecontrol_v1/quota_controller.rst | 6 + .../servicecontrol_v1/service_controller.rst | 6 + .../v1/docs/servicecontrol_v1/services_.rst | 7 + .../v1/docs/servicecontrol_v1/types_.rst | 6 + .../google/cloud/servicecontrol/__init__.py | 65 + .../cloud/servicecontrol/gapic_version.py | 16 + .../v1/google/cloud/servicecontrol/py.typed | 2 + .../cloud/servicecontrol_v1/__init__.py | 66 + .../servicecontrol_v1/gapic_metadata.json | 92 + .../cloud/servicecontrol_v1/gapic_version.py | 16 + .../google/cloud/servicecontrol_v1/py.typed | 2 + .../servicecontrol_v1/services/__init__.py | 15 + .../services/quota_controller/__init__.py | 22 + .../services/quota_controller/async_client.py | 333 + .../services/quota_controller/client.py | 654 + .../quota_controller/transports/README.rst | 9 + .../quota_controller/transports/__init__.py | 38 + .../quota_controller/transports/base.py | 155 + .../quota_controller/transports/grpc.py | 284 + .../transports/grpc_asyncio.py | 305 + 
.../quota_controller/transports/rest.py | 281 + .../quota_controller/transports/rest_base.py | 130 + .../services/service_controller/__init__.py | 22 + .../service_controller/async_client.py | 438 + .../services/service_controller/client.py | 759 + .../service_controller/transports/README.rst | 9 + .../service_controller/transports/__init__.py | 38 + .../service_controller/transports/base.py | 178 + .../service_controller/transports/grpc.py | 334 + .../transports/grpc_asyncio.py | 369 + .../service_controller/transports/rest.py | 390 + .../transports/rest_base.py | 169 + .../cloud/servicecontrol_v1/types/__init__.py | 68 + .../servicecontrol_v1/types/check_error.py | 165 + .../servicecontrol_v1/types/distribution.py | 241 + .../servicecontrol_v1/types/http_request.py | 156 + .../servicecontrol_v1/types/log_entry.py | 238 + .../servicecontrol_v1/types/metric_value.py | 153 + .../servicecontrol_v1/types/operation.py | 187 + .../types/quota_controller.py | 328 + .../types/service_controller.py | 335 + .../google-cloud-service-control/v1/mypy.ini | 3 + .../v1/noxfile.py | 280 + ...d_quota_controller_allocate_quota_async.py | 51 + ...ed_quota_controller_allocate_quota_sync.py | 51 + ...enerated_service_controller_check_async.py | 51 + ...generated_service_controller_check_sync.py | 51 + ...nerated_service_controller_report_async.py | 51 + ...enerated_service_controller_report_sync.py | 51 + ...metadata_google.api.servicecontrol.v1.json | 474 + .../fixup_servicecontrol_v1_keywords.py | 178 + .../google-cloud-service-control/v1/setup.py | 98 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.13.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + 
.../unit/gapic/servicecontrol_v1/__init__.py | 16 + .../test_quota_controller.py | 1864 ++ .../test_service_controller.py | 2323 ++ .../v2/.coveragerc | 13 + .../google-cloud-service-control/v2/.flake8 | 33 + .../v2/MANIFEST.in | 2 + .../v2/README.rst | 49 + .../v2/docs/_static/custom.css | 3 + .../v2/docs/conf.py | 376 + .../v2/docs/index.rst | 7 + .../servicecontrol_v2/service_controller.rst | 6 + .../v2/docs/servicecontrol_v2/services_.rst | 6 + .../v2/docs/servicecontrol_v2/types_.rst | 6 + .../google/cloud/servicecontrol/__init__.py | 39 + .../cloud/servicecontrol/gapic_version.py | 16 + .../v2/google/cloud/servicecontrol/py.typed | 2 + .../cloud/servicecontrol_v2/__init__.py | 40 + .../servicecontrol_v2/gapic_metadata.json | 58 + .../cloud/servicecontrol_v2/gapic_version.py | 16 + .../google/cloud/servicecontrol_v2/py.typed | 2 + .../servicecontrol_v2/services/__init__.py | 15 + .../services/service_controller/__init__.py | 22 + .../service_controller/async_client.py | 451 + .../services/service_controller/client.py | 772 + .../service_controller/transports/README.rst | 9 + .../service_controller/transports/__init__.py | 38 + .../service_controller/transports/base.py | 178 + .../service_controller/transports/grpc.py | 345 + .../transports/grpc_asyncio.py | 380 + .../service_controller/transports/rest.py | 397 + .../transports/rest_base.py | 169 + .../cloud/servicecontrol_v2/types/__init__.py | 32 + .../types/service_controller.py | 232 + .../google-cloud-service-control/v2/mypy.ini | 3 + .../v2/noxfile.py | 280 + ...enerated_service_controller_check_async.py | 51 + ...generated_service_controller_check_sync.py | 51 + ...nerated_service_controller_report_async.py | 51 + ...enerated_service_controller_report_sync.py | 51 + ...metadata_google.api.servicecontrol.v2.json | 321 + .../fixup_servicecontrol_v2_keywords.py | 177 + .../google-cloud-service-control/v2/setup.py | 98 + .../v2/testing/constraints-3.10.txt | 6 + .../v2/testing/constraints-3.11.txt | 6 + 
.../v2/testing/constraints-3.12.txt | 6 + .../v2/testing/constraints-3.13.txt | 6 + .../v2/testing/constraints-3.7.txt | 10 + .../v2/testing/constraints-3.8.txt | 6 + .../v2/testing/constraints-3.9.txt | 6 + .../v2/tests/__init__.py | 16 + .../v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/servicecontrol_v2/__init__.py | 16 + .../test_service_controller.py | 2284 ++ .../v1/.coveragerc | 13 + .../v1/.flake8 | 33 + .../v1/MANIFEST.in | 2 + .../v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../v1/docs/conf.py | 376 + .../v1/docs/index.rst | 7 + .../servicemanagement_v1/service_manager.rst | 10 + .../docs/servicemanagement_v1/services_.rst | 6 + .../v1/docs/servicemanagement_v1/types_.rst | 6 + .../cloud/servicemanagement/__init__.py | 83 + .../cloud/servicemanagement/gapic_version.py | 16 + .../google/cloud/servicemanagement/py.typed | 2 + .../cloud/servicemanagement_v1/__init__.py | 84 + .../servicemanagement_v1/gapic_metadata.json | 223 + .../servicemanagement_v1/gapic_version.py | 16 + .../cloud/servicemanagement_v1/py.typed | 2 + .../servicemanagement_v1/services/__init__.py | 15 + .../services/service_manager/__init__.py | 22 + .../services/service_manager/async_client.py | 2332 ++ .../services/service_manager/client.py | 2637 +++ .../services/service_manager/pagers.py | 434 + .../service_manager/transports/README.rst | 9 + .../service_manager/transports/__init__.py | 38 + .../service_manager/transports/base.py | 395 + .../service_manager/transports/grpc.py | 784 + .../transports/grpc_asyncio.py | 885 + .../service_manager/transports/rest.py | 2064 ++ .../service_manager/transports/rest_base.py | 753 + .../servicemanagement_v1/types/__init__.py | 78 + .../servicemanagement_v1/types/resources.py | 490 + .../types/servicemanager.py | 576 + .../v1/mypy.ini | 3 + .../v1/noxfile.py | 280 + ...ed_service_manager_create_service_async.py | 55 + ...ice_manager_create_service_config_async.py | 52 + 
...vice_manager_create_service_config_sync.py | 52 + ...ce_manager_create_service_rollout_async.py | 56 + ...ice_manager_create_service_rollout_sync.py | 56 + ...ted_service_manager_create_service_sync.py | 55 + ...ed_service_manager_delete_service_async.py | 56 + ...ted_service_manager_delete_service_sync.py | 56 + ...ce_manager_generate_config_report_async.py | 51 + ...ice_manager_generate_config_report_sync.py | 51 + ...rated_service_manager_get_service_async.py | 52 + ...ervice_manager_get_service_config_async.py | 53 + ...service_manager_get_service_config_sync.py | 53 + ...rvice_manager_get_service_rollout_async.py | 53 + ...ervice_manager_get_service_rollout_sync.py | 53 + ...erated_service_manager_get_service_sync.py | 52 + ...vice_manager_list_service_configs_async.py | 53 + ...rvice_manager_list_service_configs_sync.py | 53 + ...ice_manager_list_service_rollouts_async.py | 54 + ...vice_manager_list_service_rollouts_sync.py | 54 + ...ted_service_manager_list_services_async.py | 52 + ...ated_service_manager_list_services_sync.py | 52 + ...vice_manager_submit_config_source_async.py | 56 + ...rvice_manager_submit_config_source_sync.py | 56 + ..._service_manager_undelete_service_async.py | 56 + ...d_service_manager_undelete_service_sync.py | 56 + ...adata_google.api.servicemanagement.v1.json | 2188 ++ .../fixup_servicemanagement_v1_keywords.py | 188 + .../v1/setup.py | 99 + .../v1/testing/constraints-3.10.txt | 7 + .../v1/testing/constraints-3.11.txt | 7 + .../v1/testing/constraints-3.12.txt | 7 + .../v1/testing/constraints-3.13.txt | 7 + .../v1/testing/constraints-3.7.txt | 11 + .../v1/testing/constraints-3.8.txt | 7 + .../v1/testing/constraints-3.9.txt | 7 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../gapic/servicemanagement_v1/__init__.py | 16 + .../test_service_manager.py | 11683 ++++++++++ .../google-cloud-service-usage/v1/.coveragerc | 13 + .../google-cloud-service-usage/v1/.flake8 | 
33 + .../google-cloud-service-usage/v1/MANIFEST.in | 2 + .../google-cloud-service-usage/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../v1/docs/conf.py | 376 + .../v1/docs/index.rst | 7 + .../docs/service_usage_v1/service_usage.rst | 10 + .../v1/docs/service_usage_v1/services_.rst | 6 + .../v1/docs/service_usage_v1/types_.rst | 6 + .../v1/google/cloud/service_usage/__init__.py | 57 + .../cloud/service_usage/gapic_version.py | 16 + .../v1/google/cloud/service_usage/py.typed | 2 + .../google/cloud/service_usage_v1/__init__.py | 58 + .../service_usage_v1/gapic_metadata.json | 118 + .../cloud/service_usage_v1/gapic_version.py | 16 + .../v1/google/cloud/service_usage_v1/py.typed | 2 + .../service_usage_v1/services/__init__.py | 15 + .../services/service_usage/__init__.py | 22 + .../services/service_usage/async_client.py | 920 + .../services/service_usage/client.py | 1252 ++ .../services/service_usage/pagers.py | 163 + .../service_usage/transports/README.rst | 9 + .../service_usage/transports/__init__.py | 38 + .../services/service_usage/transports/base.py | 262 + .../services/service_usage/transports/grpc.py | 487 + .../service_usage/transports/grpc_asyncio.py | 543 + .../services/service_usage/transports/rest.py | 1040 + .../service_usage/transports/rest_base.py | 347 + .../cloud/service_usage_v1/types/__init__.py | 52 + .../cloud/service_usage_v1/types/resources.py | 220 + .../service_usage_v1/types/serviceusage.py | 377 + .../google-cloud-service-usage/v1/mypy.ini | 3 + .../google-cloud-service-usage/v1/noxfile.py | 280 + ...rvice_usage_batch_enable_services_async.py | 55 + ...ervice_usage_batch_enable_services_sync.py | 55 + ..._service_usage_batch_get_services_async.py | 51 + ...d_service_usage_batch_get_services_sync.py | 51 + ...ted_service_usage_disable_service_async.py | 55 + ...ated_service_usage_disable_service_sync.py | 55 + ...ated_service_usage_enable_service_async.py | 55 + ...rated_service_usage_enable_service_sync.py | 55 + 
...nerated_service_usage_get_service_async.py | 51 + ...enerated_service_usage_get_service_sync.py | 51 + ...rated_service_usage_list_services_async.py | 52 + ...erated_service_usage_list_services_sync.py | 52 + ...t_metadata_google.api.serviceusage.v1.json | 933 + .../fixup_service_usage_v1_keywords.py | 181 + .../google-cloud-service-usage/v1/setup.py | 98 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.13.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/service_usage_v1/__init__.py | 16 + .../service_usage_v1/test_service_usage.py | 4761 ++++ 450 files changed, 132979 insertions(+) create mode 100644 owl-bot-staging/google-cloud-dms/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-dms/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-dms/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-dms/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/data_migration_service.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/py.typed create mode 100644 
owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms_resources.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-dms/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py create mode 100644 
owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json create mode 100644 owl-bot-staging/google-cloud-dms/v1/scripts/fixup_clouddms_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-dms/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-quotas/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-quotas/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-quotas/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-quotas/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/cloud_quotas.rst create mode 100644 owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/types_.rst create mode 100644 
owl-bot-staging/google-cloud-quotas/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/py.typed create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py create mode 100644 
owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/cloudquotas.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/resources.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-quotas/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py create mode 100644 
owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json create mode 100644 owl-bot-staging/google-cloud-quotas/v1/scripts/fixup_cloudquotas_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-quotas/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/__init__.py 
create mode 100644 owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-service-control/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-service-control/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-service-control/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/quota_controller.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/service_controller.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/py.typed create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/__init__.py 
create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/async_client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/README.rst 
create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/check_error.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/distribution.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/http_request.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/log_entry.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/metric_value.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/operation.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/quota_controller.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/service_controller.py create mode 100644 
owl-bot-staging/google-cloud-service-control/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-service-control/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_async.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_sync.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_async.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_sync.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json create mode 100644 owl-bot-staging/google-cloud-service-control/v1/scripts/fixup_servicecontrol_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.8.txt create mode 
100644 owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py create mode 100644 owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_service_controller.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/.coveragerc create mode 100644 owl-bot-staging/google-cloud-service-control/v2/.flake8 create mode 100644 owl-bot-staging/google-cloud-service-control/v2/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-service-control/v2/README.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/service_controller.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/services_.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/types_.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/py.typed create mode 100644 
owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/py.typed create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/async_client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/client.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest.py create mode 100644 
owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/service_controller.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/mypy.ini create mode 100644 owl-bot-staging/google-cloud-service-control/v2/noxfile.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_async.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_sync.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_async.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_sync.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json create mode 100644 owl-bot-staging/google-cloud-service-control/v2/scripts/fixup_servicecontrol_v2_keywords.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/setup.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.7.txt create mode 100644 
owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-service-control/v2/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/test_service_controller.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-service-management/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-service-management/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-service-management/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/service_manager.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/py.typed create mode 100644 
owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/async_client.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/client.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/pagers.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/base.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc_asyncio.py create mode 100644 
owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/resources.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/servicemanager.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-service-management/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_sync.py create mode 100644 
owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_async.py create mode 100644 
owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_sync.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json create mode 100644 owl-bot-staging/google-cloud-service-management/v1/scripts/fixup_servicemanagement_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.10.txt create mode 100644 
owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-service-management/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/test_service_manager.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/service_usage.rst create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/types_.rst create mode 100644 
owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/py.typed create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/async_client.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/client.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/pagers.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/base.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc.py create mode 100644 
owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/resources.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/serviceusage.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_enable_services_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_enable_services_sync.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_sync.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_sync.py create mode 100644 
owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_sync.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_sync.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_async.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_sync.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/scripts/fixup_service_usage_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/tests/__init__.py create mode 100644 
owl-bot-staging/google-cloud-service-usage/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/test_service_usage.py diff --git a/owl-bot-staging/google-cloud-dms/v1/.coveragerc b/owl-bot-staging/google-cloud-dms/v1/.coveragerc new file mode 100644 index 000000000000..437b0aafb8cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/clouddms/__init__.py + google/cloud/clouddms/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-dms/v1/.flake8 b/owl-bot-staging/google-cloud-dms/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-dms/v1/MANIFEST.in b/owl-bot-staging/google-cloud-dms/v1/MANIFEST.in new file mode 100644 index 000000000000..b318e50d1cad --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/clouddms *.py +recursive-include google/cloud/clouddms_v1 *.py diff --git a/owl-bot-staging/google-cloud-dms/v1/README.rst b/owl-bot-staging/google-cloud-dms/v1/README.rst new file mode 100644 index 000000000000..94ac6f55eea9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Clouddms API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Clouddms API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-dms/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/data_migration_service.rst b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/data_migration_service.rst new file mode 100644 index 000000000000..86f0b88cffc6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/data_migration_service.rst @@ -0,0 +1,10 @@ +DataMigrationService +-------------------------------------- + +.. automodule:: google.cloud.clouddms_v1.services.data_migration_service + :members: + :inherited-members: + +.. automodule:: google.cloud.clouddms_v1.services.data_migration_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/services_.rst b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/services_.rst new file mode 100644 index 000000000000..89359f2a882b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Clouddms v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + data_migration_service diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/types_.rst b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/types_.rst new file mode 100644 index 000000000000..26b87db57122 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/clouddms_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Clouddms v1 API +====================================== + +.. 
automodule:: google.cloud.clouddms_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/conf.py b/owl-bot-staging/google-cloud-dms/v1/docs/conf.py new file mode 100644 index 000000000000..c0ea5b2401c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dms documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dms" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dms-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dms.tex", + u"google-cloud-dms Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dms", + u"Google Cloud Clouddms Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dms", + u"google-cloud-dms Documentation", + author, + "google-cloud-dms", + "GAPIC library for Google Cloud Clouddms API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dms/v1/docs/index.rst b/owl-bot-staging/google-cloud-dms/v1/docs/index.rst new file mode 100644 index 000000000000..422577182635 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + clouddms_v1/services_ + clouddms_v1/types_ diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/__init__.py new file mode 100644 index 000000000000..d77bb55b521e --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/__init__.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.clouddms import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.clouddms_v1.services.data_migration_service.client import DataMigrationServiceClient +from google.cloud.clouddms_v1.services.data_migration_service.async_client import DataMigrationServiceAsyncClient + +from google.cloud.clouddms_v1.types.clouddms import ApplyConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CommitConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import ConvertConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CreateConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import CreateConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CreateMappingRuleRequest +from google.cloud.clouddms_v1.types.clouddms import CreateMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import 
CreatePrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteMappingRuleRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import DeletePrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsRequest +from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsResponse +from google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesRequest +from google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesResponse +from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsRequest +from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsResponse +from google.cloud.clouddms_v1.types.clouddms import GenerateSshScriptRequest +from google.cloud.clouddms_v1.types.clouddms import GenerateTcpProxyScriptRequest +from google.cloud.clouddms_v1.types.clouddms import GetConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import GetConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import GetMappingRuleRequest +from google.cloud.clouddms_v1.types.clouddms import GetMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import GetPrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import ImportMappingRulesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesResponse +from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesResponse +from google.cloud.clouddms_v1.types.clouddms import 
ListMappingRulesRequest +from google.cloud.clouddms_v1.types.clouddms import ListMappingRulesResponse +from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsRequest +from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsResponse +from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsRequest +from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsResponse +from google.cloud.clouddms_v1.types.clouddms import OperationMetadata +from google.cloud.clouddms_v1.types.clouddms import PromoteMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import RestartMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import ResumeMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import RollbackConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsRequest +from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsResponse +from google.cloud.clouddms_v1.types.clouddms import SeedConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import SshScript +from google.cloud.clouddms_v1.types.clouddms import StartMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import StopMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import TcpProxyScript +from google.cloud.clouddms_v1.types.clouddms import UpdateConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import UpdateConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import UpdateMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import VerifyMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import VmCreationConfig +from google.cloud.clouddms_v1.types.clouddms import VmSelectionConfig +from google.cloud.clouddms_v1.types.clouddms import DatabaseEntityView +from google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbConnectionProfile +from 
google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbSettings +from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlSettings +from google.cloud.clouddms_v1.types.clouddms_resources import ConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import ConversionWorkspaceInfo +from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseType +from google.cloud.clouddms_v1.types.clouddms_resources import ForwardSshTunnelConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJob +from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJobVerificationError +from google.cloud.clouddms_v1.types.clouddms_resources import MySqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import OracleConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import PostgreSqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnection +from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import PrivateServiceConnectConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import ReverseSshConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import SqlAclEntry +from google.cloud.clouddms_v1.types.clouddms_resources import SqlIpConfig +from google.cloud.clouddms_v1.types.clouddms_resources import SslConfig +from google.cloud.clouddms_v1.types.clouddms_resources import StaticIpConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import StaticServiceIpConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConfig +from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseEngine 
+from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseProvider +from google.cloud.clouddms_v1.types.clouddms_resources import NetworkArchitecture +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ApplyHash +from google.cloud.clouddms_v1.types.conversionworkspace_resources import AssignSpecificValue +from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobLogEntry +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ColumnEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConditionalColumnSetValue +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConstraintEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConversionWorkspace +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConvertRowIdToColumn +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEngineInfo +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseInstanceEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DoubleComparisonFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityDdl +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityIssue +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMapping +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMappingLogEntry +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMove +from google.cloud.clouddms_v1.types.conversionworkspace_resources import FilterTableColumns +from google.cloud.clouddms_v1.types.conversionworkspace_resources import FunctionEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import IndexEntity +from 
google.cloud.clouddms_v1.types.conversionworkspace_resources import IntComparisonFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import MappingRule +from google.cloud.clouddms_v1.types.conversionworkspace_resources import MappingRuleFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import MaterializedViewEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import MultiColumnDatatypeChange +from google.cloud.clouddms_v1.types.conversionworkspace_resources import MultiEntityRename +from google.cloud.clouddms_v1.types.conversionworkspace_resources import PackageEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import RoundToScale +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SchemaEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SequenceEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SetTablePrimaryKey +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SingleColumnChange +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SingleEntityRename +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SinglePackageChange +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SourceNumericFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SourceSqlChange +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SourceTextFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import StoredProcedureEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SynonymEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import TableEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import TriggerEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import UDTEntity 
+from google.cloud.clouddms_v1.types.conversionworkspace_resources import ValueListFilter +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ValueTransformation +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ViewEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobType +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntityType +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityNameTransformation +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ImportRulesFileFormat +from google.cloud.clouddms_v1.types.conversionworkspace_resources import NumericFilterOption +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ValueComparison +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ValuePresentInList + +__all__ = ('DataMigrationServiceClient', + 'DataMigrationServiceAsyncClient', + 'ApplyConversionWorkspaceRequest', + 'CommitConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'CreateConnectionProfileRequest', + 'CreateConversionWorkspaceRequest', + 'CreateMappingRuleRequest', + 'CreateMigrationJobRequest', + 'CreatePrivateConnectionRequest', + 'DeleteConnectionProfileRequest', + 'DeleteConversionWorkspaceRequest', + 'DeleteMappingRuleRequest', + 'DeleteMigrationJobRequest', + 'DeletePrivateConnectionRequest', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + 'GenerateSshScriptRequest', + 'GenerateTcpProxyScriptRequest', + 'GetConnectionProfileRequest', + 'GetConversionWorkspaceRequest', + 'GetMappingRuleRequest', + 'GetMigrationJobRequest', + 'GetPrivateConnectionRequest', + 'ImportMappingRulesRequest', + 'ListConnectionProfilesRequest', + 
'ListConnectionProfilesResponse', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'ListMappingRulesRequest', + 'ListMappingRulesResponse', + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'OperationMetadata', + 'PromoteMigrationJobRequest', + 'RestartMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'RollbackConversionWorkspaceRequest', + 'SearchBackgroundJobsRequest', + 'SearchBackgroundJobsResponse', + 'SeedConversionWorkspaceRequest', + 'SshScript', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'TcpProxyScript', + 'UpdateConnectionProfileRequest', + 'UpdateConversionWorkspaceRequest', + 'UpdateMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'DatabaseEntityView', + 'AlloyDbConnectionProfile', + 'AlloyDbSettings', + 'CloudSqlConnectionProfile', + 'CloudSqlSettings', + 'ConnectionProfile', + 'ConversionWorkspaceInfo', + 'DatabaseType', + 'ForwardSshTunnelConnectivity', + 'MigrationJob', + 'MigrationJobVerificationError', + 'MySqlConnectionProfile', + 'OracleConnectionProfile', + 'PostgreSqlConnectionProfile', + 'PrivateConnection', + 'PrivateConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'SqlAclEntry', + 'SqlIpConfig', + 'SslConfig', + 'StaticIpConnectivity', + 'StaticServiceIpConnectivity', + 'VpcPeeringConfig', + 'VpcPeeringConnectivity', + 'DatabaseEngine', + 'DatabaseProvider', + 'NetworkArchitecture', + 'ApplyHash', + 'AssignSpecificValue', + 'BackgroundJobLogEntry', + 'ColumnEntity', + 'ConditionalColumnSetValue', + 'ConstraintEntity', + 'ConversionWorkspace', + 'ConvertRowIdToColumn', + 'DatabaseEngineInfo', + 'DatabaseEntity', + 'DatabaseInstanceEntity', + 'DoubleComparisonFilter', + 'EntityDdl', + 'EntityIssue', + 'EntityMapping', + 'EntityMappingLogEntry', + 'EntityMove', + 'FilterTableColumns', + 'FunctionEntity', + 'IndexEntity', + 
'IntComparisonFilter', + 'MappingRule', + 'MappingRuleFilter', + 'MaterializedViewEntity', + 'MultiColumnDatatypeChange', + 'MultiEntityRename', + 'PackageEntity', + 'RoundToScale', + 'SchemaEntity', + 'SequenceEntity', + 'SetTablePrimaryKey', + 'SingleColumnChange', + 'SingleEntityRename', + 'SinglePackageChange', + 'SourceNumericFilter', + 'SourceSqlChange', + 'SourceTextFilter', + 'StoredProcedureEntity', + 'SynonymEntity', + 'TableEntity', + 'TriggerEntity', + 'UDTEntity', + 'ValueListFilter', + 'ValueTransformation', + 'ViewEntity', + 'BackgroundJobType', + 'DatabaseEntityType', + 'EntityNameTransformation', + 'ImportRulesFileFormat', + 'NumericFilterOption', + 'ValueComparison', + 'ValuePresentInList', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/gapic_version.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/py.typed b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/py.typed new file mode 100644 index 000000000000..d368a6212c87 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/__init__.py new file mode 100644 index 000000000000..562703aec59a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/__init__.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.clouddms_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_migration_service import DataMigrationServiceClient +from .services.data_migration_service import DataMigrationServiceAsyncClient + +from .types.clouddms import ApplyConversionWorkspaceRequest +from .types.clouddms import CommitConversionWorkspaceRequest +from .types.clouddms import ConvertConversionWorkspaceRequest +from .types.clouddms import CreateConnectionProfileRequest +from .types.clouddms import CreateConversionWorkspaceRequest +from .types.clouddms import CreateMappingRuleRequest +from .types.clouddms import CreateMigrationJobRequest +from .types.clouddms import CreatePrivateConnectionRequest +from .types.clouddms import DeleteConnectionProfileRequest +from .types.clouddms import DeleteConversionWorkspaceRequest +from .types.clouddms import DeleteMappingRuleRequest +from .types.clouddms import DeleteMigrationJobRequest +from .types.clouddms import DeletePrivateConnectionRequest +from .types.clouddms import DescribeConversionWorkspaceRevisionsRequest +from .types.clouddms import DescribeConversionWorkspaceRevisionsResponse +from .types.clouddms import DescribeDatabaseEntitiesRequest +from .types.clouddms import DescribeDatabaseEntitiesResponse +from .types.clouddms import FetchStaticIpsRequest +from .types.clouddms import FetchStaticIpsResponse +from .types.clouddms import GenerateSshScriptRequest +from .types.clouddms import GenerateTcpProxyScriptRequest +from .types.clouddms import GetConnectionProfileRequest +from .types.clouddms import GetConversionWorkspaceRequest +from .types.clouddms import GetMappingRuleRequest +from .types.clouddms import GetMigrationJobRequest +from .types.clouddms import GetPrivateConnectionRequest +from .types.clouddms import ImportMappingRulesRequest +from .types.clouddms import ListConnectionProfilesRequest +from .types.clouddms import ListConnectionProfilesResponse +from 
.types.clouddms import ListConversionWorkspacesRequest +from .types.clouddms import ListConversionWorkspacesResponse +from .types.clouddms import ListMappingRulesRequest +from .types.clouddms import ListMappingRulesResponse +from .types.clouddms import ListMigrationJobsRequest +from .types.clouddms import ListMigrationJobsResponse +from .types.clouddms import ListPrivateConnectionsRequest +from .types.clouddms import ListPrivateConnectionsResponse +from .types.clouddms import OperationMetadata +from .types.clouddms import PromoteMigrationJobRequest +from .types.clouddms import RestartMigrationJobRequest +from .types.clouddms import ResumeMigrationJobRequest +from .types.clouddms import RollbackConversionWorkspaceRequest +from .types.clouddms import SearchBackgroundJobsRequest +from .types.clouddms import SearchBackgroundJobsResponse +from .types.clouddms import SeedConversionWorkspaceRequest +from .types.clouddms import SshScript +from .types.clouddms import StartMigrationJobRequest +from .types.clouddms import StopMigrationJobRequest +from .types.clouddms import TcpProxyScript +from .types.clouddms import UpdateConnectionProfileRequest +from .types.clouddms import UpdateConversionWorkspaceRequest +from .types.clouddms import UpdateMigrationJobRequest +from .types.clouddms import VerifyMigrationJobRequest +from .types.clouddms import VmCreationConfig +from .types.clouddms import VmSelectionConfig +from .types.clouddms import DatabaseEntityView +from .types.clouddms_resources import AlloyDbConnectionProfile +from .types.clouddms_resources import AlloyDbSettings +from .types.clouddms_resources import CloudSqlConnectionProfile +from .types.clouddms_resources import CloudSqlSettings +from .types.clouddms_resources import ConnectionProfile +from .types.clouddms_resources import ConversionWorkspaceInfo +from .types.clouddms_resources import DatabaseType +from .types.clouddms_resources import ForwardSshTunnelConnectivity +from .types.clouddms_resources import MigrationJob 
+from .types.clouddms_resources import MigrationJobVerificationError +from .types.clouddms_resources import MySqlConnectionProfile +from .types.clouddms_resources import OracleConnectionProfile +from .types.clouddms_resources import PostgreSqlConnectionProfile +from .types.clouddms_resources import PrivateConnection +from .types.clouddms_resources import PrivateConnectivity +from .types.clouddms_resources import PrivateServiceConnectConnectivity +from .types.clouddms_resources import ReverseSshConnectivity +from .types.clouddms_resources import SqlAclEntry +from .types.clouddms_resources import SqlIpConfig +from .types.clouddms_resources import SslConfig +from .types.clouddms_resources import StaticIpConnectivity +from .types.clouddms_resources import StaticServiceIpConnectivity +from .types.clouddms_resources import VpcPeeringConfig +from .types.clouddms_resources import VpcPeeringConnectivity +from .types.clouddms_resources import DatabaseEngine +from .types.clouddms_resources import DatabaseProvider +from .types.clouddms_resources import NetworkArchitecture +from .types.conversionworkspace_resources import ApplyHash +from .types.conversionworkspace_resources import AssignSpecificValue +from .types.conversionworkspace_resources import BackgroundJobLogEntry +from .types.conversionworkspace_resources import ColumnEntity +from .types.conversionworkspace_resources import ConditionalColumnSetValue +from .types.conversionworkspace_resources import ConstraintEntity +from .types.conversionworkspace_resources import ConversionWorkspace +from .types.conversionworkspace_resources import ConvertRowIdToColumn +from .types.conversionworkspace_resources import DatabaseEngineInfo +from .types.conversionworkspace_resources import DatabaseEntity +from .types.conversionworkspace_resources import DatabaseInstanceEntity +from .types.conversionworkspace_resources import DoubleComparisonFilter +from .types.conversionworkspace_resources import EntityDdl +from 
.types.conversionworkspace_resources import EntityIssue +from .types.conversionworkspace_resources import EntityMapping +from .types.conversionworkspace_resources import EntityMappingLogEntry +from .types.conversionworkspace_resources import EntityMove +from .types.conversionworkspace_resources import FilterTableColumns +from .types.conversionworkspace_resources import FunctionEntity +from .types.conversionworkspace_resources import IndexEntity +from .types.conversionworkspace_resources import IntComparisonFilter +from .types.conversionworkspace_resources import MappingRule +from .types.conversionworkspace_resources import MappingRuleFilter +from .types.conversionworkspace_resources import MaterializedViewEntity +from .types.conversionworkspace_resources import MultiColumnDatatypeChange +from .types.conversionworkspace_resources import MultiEntityRename +from .types.conversionworkspace_resources import PackageEntity +from .types.conversionworkspace_resources import RoundToScale +from .types.conversionworkspace_resources import SchemaEntity +from .types.conversionworkspace_resources import SequenceEntity +from .types.conversionworkspace_resources import SetTablePrimaryKey +from .types.conversionworkspace_resources import SingleColumnChange +from .types.conversionworkspace_resources import SingleEntityRename +from .types.conversionworkspace_resources import SinglePackageChange +from .types.conversionworkspace_resources import SourceNumericFilter +from .types.conversionworkspace_resources import SourceSqlChange +from .types.conversionworkspace_resources import SourceTextFilter +from .types.conversionworkspace_resources import StoredProcedureEntity +from .types.conversionworkspace_resources import SynonymEntity +from .types.conversionworkspace_resources import TableEntity +from .types.conversionworkspace_resources import TriggerEntity +from .types.conversionworkspace_resources import UDTEntity +from .types.conversionworkspace_resources import ValueListFilter +from 
.types.conversionworkspace_resources import ValueTransformation +from .types.conversionworkspace_resources import ViewEntity +from .types.conversionworkspace_resources import BackgroundJobType +from .types.conversionworkspace_resources import DatabaseEntityType +from .types.conversionworkspace_resources import EntityNameTransformation +from .types.conversionworkspace_resources import ImportRulesFileFormat +from .types.conversionworkspace_resources import NumericFilterOption +from .types.conversionworkspace_resources import ValueComparison +from .types.conversionworkspace_resources import ValuePresentInList + +__all__ = ( + 'DataMigrationServiceAsyncClient', +'AlloyDbConnectionProfile', +'AlloyDbSettings', +'ApplyConversionWorkspaceRequest', +'ApplyHash', +'AssignSpecificValue', +'BackgroundJobLogEntry', +'BackgroundJobType', +'CloudSqlConnectionProfile', +'CloudSqlSettings', +'ColumnEntity', +'CommitConversionWorkspaceRequest', +'ConditionalColumnSetValue', +'ConnectionProfile', +'ConstraintEntity', +'ConversionWorkspace', +'ConversionWorkspaceInfo', +'ConvertConversionWorkspaceRequest', +'ConvertRowIdToColumn', +'CreateConnectionProfileRequest', +'CreateConversionWorkspaceRequest', +'CreateMappingRuleRequest', +'CreateMigrationJobRequest', +'CreatePrivateConnectionRequest', +'DataMigrationServiceClient', +'DatabaseEngine', +'DatabaseEngineInfo', +'DatabaseEntity', +'DatabaseEntityType', +'DatabaseEntityView', +'DatabaseInstanceEntity', +'DatabaseProvider', +'DatabaseType', +'DeleteConnectionProfileRequest', +'DeleteConversionWorkspaceRequest', +'DeleteMappingRuleRequest', +'DeleteMigrationJobRequest', +'DeletePrivateConnectionRequest', +'DescribeConversionWorkspaceRevisionsRequest', +'DescribeConversionWorkspaceRevisionsResponse', +'DescribeDatabaseEntitiesRequest', +'DescribeDatabaseEntitiesResponse', +'DoubleComparisonFilter', +'EntityDdl', +'EntityIssue', +'EntityMapping', +'EntityMappingLogEntry', +'EntityMove', +'EntityNameTransformation', 
+'FetchStaticIpsRequest', +'FetchStaticIpsResponse', +'FilterTableColumns', +'ForwardSshTunnelConnectivity', +'FunctionEntity', +'GenerateSshScriptRequest', +'GenerateTcpProxyScriptRequest', +'GetConnectionProfileRequest', +'GetConversionWorkspaceRequest', +'GetMappingRuleRequest', +'GetMigrationJobRequest', +'GetPrivateConnectionRequest', +'ImportMappingRulesRequest', +'ImportRulesFileFormat', +'IndexEntity', +'IntComparisonFilter', +'ListConnectionProfilesRequest', +'ListConnectionProfilesResponse', +'ListConversionWorkspacesRequest', +'ListConversionWorkspacesResponse', +'ListMappingRulesRequest', +'ListMappingRulesResponse', +'ListMigrationJobsRequest', +'ListMigrationJobsResponse', +'ListPrivateConnectionsRequest', +'ListPrivateConnectionsResponse', +'MappingRule', +'MappingRuleFilter', +'MaterializedViewEntity', +'MigrationJob', +'MigrationJobVerificationError', +'MultiColumnDatatypeChange', +'MultiEntityRename', +'MySqlConnectionProfile', +'NetworkArchitecture', +'NumericFilterOption', +'OperationMetadata', +'OracleConnectionProfile', +'PackageEntity', +'PostgreSqlConnectionProfile', +'PrivateConnection', +'PrivateConnectivity', +'PrivateServiceConnectConnectivity', +'PromoteMigrationJobRequest', +'RestartMigrationJobRequest', +'ResumeMigrationJobRequest', +'ReverseSshConnectivity', +'RollbackConversionWorkspaceRequest', +'RoundToScale', +'SchemaEntity', +'SearchBackgroundJobsRequest', +'SearchBackgroundJobsResponse', +'SeedConversionWorkspaceRequest', +'SequenceEntity', +'SetTablePrimaryKey', +'SingleColumnChange', +'SingleEntityRename', +'SinglePackageChange', +'SourceNumericFilter', +'SourceSqlChange', +'SourceTextFilter', +'SqlAclEntry', +'SqlIpConfig', +'SshScript', +'SslConfig', +'StartMigrationJobRequest', +'StaticIpConnectivity', +'StaticServiceIpConnectivity', +'StopMigrationJobRequest', +'StoredProcedureEntity', +'SynonymEntity', +'TableEntity', +'TcpProxyScript', +'TriggerEntity', +'UDTEntity', +'UpdateConnectionProfileRequest', 
+'UpdateConversionWorkspaceRequest', +'UpdateMigrationJobRequest', +'ValueComparison', +'ValueListFilter', +'ValuePresentInList', +'ValueTransformation', +'VerifyMigrationJobRequest', +'ViewEntity', +'VmCreationConfig', +'VmSelectionConfig', +'VpcPeeringConfig', +'VpcPeeringConnectivity', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_metadata.json new file mode 100644 index 000000000000..fbe8442f7a7b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_metadata.json @@ -0,0 +1,433 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.clouddms_v1", + "protoPackage": "google.cloud.clouddms.v1", + "schema": "1.0", + "services": { + "DataMigrationService": { + "clients": { + "grpc": { + "libraryClient": "DataMigrationServiceClient", + "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, + "CreateMappingRule": { + "methods": [ + "create_mapping_rule" + ] + }, + "CreateMigrationJob": { + "methods": [ + "create_migration_job" + ] + }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, + "DeleteMappingRule": { + "methods": [ + "delete_mapping_rule" + ] + }, + "DeleteMigrationJob": { + "methods": [ + "delete_migration_job" 
+ ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GenerateSshScript": { + "methods": [ + "generate_ssh_script" + ] + }, + "GenerateTcpProxyScript": { + "methods": [ + "generate_tcp_proxy_script" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, + "GetMappingRule": { + "methods": [ + "get_mapping_rule" + ] + }, + "GetMigrationJob": { + "methods": [ + "get_migration_job" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, + "ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, + "ListMappingRules": { + "methods": [ + "list_mapping_rules" + ] + }, + "ListMigrationJobs": { + "methods": [ + "list_migration_jobs" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "PromoteMigrationJob": { + "methods": [ + "promote_migration_job" + ] + }, + "RestartMigrationJob": { + "methods": [ + "restart_migration_job" + ] + }, + "ResumeMigrationJob": { + "methods": [ + "resume_migration_job" + ] + }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, + "StartMigrationJob": { + "methods": [ + "start_migration_job" + ] + }, + "StopMigrationJob": { + "methods": [ + "stop_migration_job" + ] + }, + 
"UpdateConnectionProfile": { + "methods": [ + "update_connection_profile" + ] + }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, + "UpdateMigrationJob": { + "methods": [ + "update_migration_job" + ] + }, + "VerifyMigrationJob": { + "methods": [ + "verify_migration_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataMigrationServiceAsyncClient", + "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, + "CreateMappingRule": { + "methods": [ + "create_mapping_rule" + ] + }, + "CreateMigrationJob": { + "methods": [ + "create_migration_job" + ] + }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, + "DeleteMappingRule": { + "methods": [ + "delete_mapping_rule" + ] + }, + "DeleteMigrationJob": { + "methods": [ + "delete_migration_job" + ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GenerateSshScript": { + "methods": [ + "generate_ssh_script" + ] + }, + "GenerateTcpProxyScript": { + "methods": [ + "generate_tcp_proxy_script" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + 
"GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, + "GetMappingRule": { + "methods": [ + "get_mapping_rule" + ] + }, + "GetMigrationJob": { + "methods": [ + "get_migration_job" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, + "ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, + "ListMappingRules": { + "methods": [ + "list_mapping_rules" + ] + }, + "ListMigrationJobs": { + "methods": [ + "list_migration_jobs" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "PromoteMigrationJob": { + "methods": [ + "promote_migration_job" + ] + }, + "RestartMigrationJob": { + "methods": [ + "restart_migration_job" + ] + }, + "ResumeMigrationJob": { + "methods": [ + "resume_migration_job" + ] + }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, + "StartMigrationJob": { + "methods": [ + "start_migration_job" + ] + }, + "StopMigrationJob": { + "methods": [ + "stop_migration_job" + ] + }, + "UpdateConnectionProfile": { + "methods": [ + "update_connection_profile" + ] + }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, + "UpdateMigrationJob": { + "methods": [ + "update_migration_job" + ] + }, + "VerifyMigrationJob": { + "methods": [ + "verify_migration_job" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_version.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/py.typed b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/py.typed new file mode 100644 index 000000000000..d368a6212c87 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py new file mode 100644 index 000000000000..b4c7e05919f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import DataMigrationServiceClient +from .async_client import DataMigrationServiceAsyncClient + +__all__ = ( + 'DataMigrationServiceClient', + 'DataMigrationServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py new file mode 100644 index 000000000000..0f8710f2a7da --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py @@ -0,0 +1,5479 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.clouddms_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport +from .client import DataMigrationServiceClient + + +class DataMigrationServiceAsyncClient: + """Database Migration service""" + + _client: 
DataMigrationServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DataMigrationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataMigrationServiceClient._DEFAULT_UNIVERSE + + connection_profile_path = staticmethod(DataMigrationServiceClient.connection_profile_path) + parse_connection_profile_path = staticmethod(DataMigrationServiceClient.parse_connection_profile_path) + conversion_workspace_path = staticmethod(DataMigrationServiceClient.conversion_workspace_path) + parse_conversion_workspace_path = staticmethod(DataMigrationServiceClient.parse_conversion_workspace_path) + mapping_rule_path = staticmethod(DataMigrationServiceClient.mapping_rule_path) + parse_mapping_rule_path = staticmethod(DataMigrationServiceClient.parse_mapping_rule_path) + migration_job_path = staticmethod(DataMigrationServiceClient.migration_job_path) + parse_migration_job_path = staticmethod(DataMigrationServiceClient.parse_migration_job_path) + networks_path = staticmethod(DataMigrationServiceClient.networks_path) + parse_networks_path = staticmethod(DataMigrationServiceClient.parse_networks_path) + private_connection_path = staticmethod(DataMigrationServiceClient.private_connection_path) + parse_private_connection_path = staticmethod(DataMigrationServiceClient.parse_private_connection_path) + common_billing_account_path = staticmethod(DataMigrationServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataMigrationServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataMigrationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataMigrationServiceClient.parse_common_folder_path) + common_organization_path = 
staticmethod(DataMigrationServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataMigrationServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataMigrationServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataMigrationServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataMigrationServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataMigrationServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceAsyncClient: The constructed client. + """ + return DataMigrationServiceClient.from_service_account_info.__func__(DataMigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceAsyncClient: The constructed client. + """ + return DataMigrationServiceClient.from_service_account_file.__func__(DataMigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataMigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataMigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataMigrationServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DataMigrationServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataMigrationServiceTransport, Callable[..., DataMigrationServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data migration service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataMigrationServiceTransport,Callable[..., DataMigrationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataMigrationServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = DataMigrationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_migration_jobs(self, + request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationJobsAsyncPager: + r"""Lists migration jobs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]]): + The request object. Retrieves a list of all migration + jobs in a given project and location. + parent (:class:`str`): + Required. The parent which owns this + collection of migrationJobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager: + Response message for + 'ListMigrationJobs' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListMigrationJobsRequest): + request = clouddms.ListMigrationJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_migration_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListMigrationJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_migration_job(self, + request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.MigrationJob: + r"""Gets details of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]]): + The request object. Request message for 'GetMigrationJob' + request. + name (:class:`str`): + Required. Name of the migration job + resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MigrationJob: + Represents a Database Migration + Service migration job object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetMigrationJobRequest): + request = clouddms.GetMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_migration_job(self, + request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + migration_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new migration job in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]]): + The request object. 
Request message to create a new + Database Migration Service migration job + in the specified project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of migration jobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): + Required. Represents a `migration + job `__ + object. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job_id (:class:`str`): + Required. The ID of the instance to + create. + + This corresponds to the ``migration_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_job, migration_job_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.CreateMigrationJobRequest): + request = clouddms.CreateMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_job is not None: + request.migration_job = migration_job + if migration_job_id is not None: + request.migration_job_id = migration_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_migration_job(self, + request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, + *, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]]): + The request object. Request message for + 'UpdateMigrationJob' request. + migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): + Required. The migration job + parameters to update. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([migration_job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateMigrationJobRequest): + request = clouddms.UpdateMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if migration_job is not None: + request.migration_job = migration_job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job.name", request.migration_job.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_migration_job(self, + request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]]): + The request object. Request message for + 'DeleteMigrationJob' request. + name (:class:`str`): + Required. Name of the migration job + resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteMigrationJobRequest): + request = clouddms.DeleteMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def start_migration_job(self, + request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Start an already created migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]]): + The request object. 
Request message for + 'StartMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.StartMigrationJobRequest): + request = clouddms.StartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.start_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def stop_migration_job(self, + request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops a running migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]]): + The request object. Request message for + 'StopMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.StopMigrationJobRequest): + request = clouddms.StopMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.stop_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def resume_migration_job(self, + request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]]): + The request object. Request message for + 'ResumeMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ResumeMigrationJobRequest): + request = clouddms.ResumeMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.resume_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def promote_migration_job(self, + request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]]): + The request object. Request message for + 'PromoteMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.PromoteMigrationJobRequest): + request = clouddms.PromoteMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.promote_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def verify_migration_job(self, + request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]]): + The request object. Request message for + 'VerifyMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.VerifyMigrationJobRequest): + request = clouddms.VerifyMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.verify_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restart_migration_job(self, + request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]]): + The request object. Request message for + 'RestartMigrationJob' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.RestartMigrationJobRequest): + request = clouddms.RestartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.restart_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def generate_ssh_script(self, + request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SshScript: + r"""Generate a SSH configuration script to configure the + reverse SSH connectivity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = await client.generate_ssh_script(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]]): + The request object. Request message for + 'GenerateSshScript' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SshScript: + Response message for + 'GenerateSshScript' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GenerateSshScriptRequest): + request = clouddms.GenerateSshScriptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_ssh_script] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job", request.migration_job), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def generate_tcp_proxy_script(self, + request: Optional[Union[clouddms.GenerateTcpProxyScriptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.TcpProxyScript: + r"""Generate a TCP Proxy configuration script to + configure a cloud-hosted VM running a TCP Proxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_generate_tcp_proxy_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GenerateTcpProxyScriptRequest( + vm_name="vm_name_value", + vm_machine_type="vm_machine_type_value", + vm_subnet="vm_subnet_value", + ) + + # Make the request + response = await client.generate_tcp_proxy_script(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GenerateTcpProxyScriptRequest, dict]]): + The request object. Request message for + 'GenerateTcpProxyScript' request. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.TcpProxyScript: + Response message for + 'GenerateTcpProxyScript' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GenerateTcpProxyScriptRequest): + request = clouddms.GenerateTcpProxyScriptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_tcp_proxy_script] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job", request.migration_job), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_connection_profiles(self, + request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionProfilesAsyncPager: + r"""Retrieves a list of all connection profiles in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]]): + The request object. Request message for + 'ListConnectionProfiles' request. + parent (:class:`str`): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager: + Response message for + 'ListConnectionProfiles' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListConnectionProfilesRequest): + request = clouddms.ListConnectionProfilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_connection_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection_profile(self, + request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.ConnectionProfile: + r"""Gets details of a single connection profile. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]]): + The request object. Request message for + 'GetConnectionProfile' request. + name (:class:`str`): + Required. Name of the connection + profile resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConnectionProfile: + A connection profile definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetConnectionProfileRequest): + request = clouddms.GetConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_connection_profile(self, + request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + connection_profile_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new connection profile in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]]): + The request object. Request message for + 'CreateConnectionProfile' request. + parent (:class:`str`): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`): + Required. The create request body + including the connection profile data + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile_id (:class:`str`): + Required. The connection profile + identifier. 
+ + This corresponds to the ``connection_profile_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection_profile, connection_profile_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreateConnectionProfileRequest): + request = clouddms.CreateConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection_profile is not None: + request.connection_profile = connection_profile + if connection_profile_id is not None: + request.connection_profile_id = connection_profile_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_connection_profile(self, + request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None, + *, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the configuration of a single connection + profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]]): + The request object. Request message for + 'UpdateConnectionProfile' request. + connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`): + Required. The connection profile + parameters to update. + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection_profile, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateConnectionProfileRequest): + request = clouddms.UpdateConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection_profile is not None: + request.connection_profile = connection_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection_profile.name", request.connection_profile.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_connection_profile(self, + request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]]): + The request object. Request message for + 'DeleteConnectionProfile' request. + name (:class:`str`): + Required. Name of the connection + profile resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteConnectionProfileRequest): + request = clouddms.DeleteConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_private_connection(self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (:class:`str`): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (:class:`google.cloud.clouddms_v1.types.PrivateConnection`): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (:class:`str`): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreatePrivateConnectionRequest): + request = clouddms.CreatePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_private_connection(self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]]): + The request object. 
Request message to get a private + connection resource. + name (:class:`str`): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetPrivateConnectionRequest): + request = clouddms.GetPrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_private_connections(self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsAsyncPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (:class:`str`): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListPrivateConnectionsRequest): + request = clouddms.ListPrivateConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_private_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPrivateConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_private_connection(self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]]): + The request object. Request message to delete a private + connection. + name (:class:`str`): + Required. 
The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeletePrivateConnectionRequest): + request = clouddms.DeletePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.delete_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_conversion_workspace(self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.GetConversionWorkspaceRequest): + request = clouddms.GetConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_conversion_workspaces(self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesAsyncPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListConversionWorkspacesRequest): + request = clouddms.ListConversionWorkspacesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_conversion_workspaces] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConversionWorkspacesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_conversion_workspace(self, + request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request 
(Optional[Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (:class:`str`): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): + request = clouddms.CreateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_conversion_workspace(self, + request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, + *, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'UpdateConversionWorkspace' request. 
+ conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): + request = clouddms.UpdateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace.name", request.conversion_workspace.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_conversion_workspace(self, + request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): + request = clouddms.DeleteConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_mapping_rule(self, + request: Optional[Union[clouddms.CreateMappingRuleRequest, dict]] = None, + *, + parent: Optional[str] = None, + mapping_rule: Optional[conversionworkspace_resources.MappingRule] = None, + mapping_rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.MappingRule: + r"""Creates a new mapping rule for a given conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + mapping_rule = clouddms_v1.MappingRule() + mapping_rule.single_entity_rename.new_name = "new_name_value" + mapping_rule.rule_scope = "DATABASE_ENTITY_TYPE_DATABASE" + mapping_rule.rule_order = 1075 + + request = clouddms_v1.CreateMappingRuleRequest( + parent="parent_value", + mapping_rule_id="mapping_rule_id_value", + mapping_rule=mapping_rule, + ) + + # Make the request + response = await client.create_mapping_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateMappingRuleRequest, dict]]): + The request object. Request message for + 'CreateMappingRule' command. + parent (:class:`str`): + Required. The parent which owns this + collection of mapping rules. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mapping_rule (:class:`google.cloud.clouddms_v1.types.MappingRule`): + Required. Represents a [mapping rule] + (https://cloud.google.com/database-migration/reference/rest/v1/projects.locations.mappingRules) + object. + + This corresponds to the ``mapping_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mapping_rule_id (:class:`str`): + Required. The ID of the rule to + create. + + This corresponds to the ``mapping_rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MappingRule: + Definition of a transformation that + is to be applied to a group of entities + in the source schema. Several such + transformations can be applied to an + entity sequentially to define the + corresponding entity in the target + schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, mapping_rule, mapping_rule_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.CreateMappingRuleRequest): + request = clouddms.CreateMappingRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mapping_rule is not None: + request.mapping_rule = mapping_rule + if mapping_rule_id is not None: + request.mapping_rule_id = mapping_rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_mapping_rule(self, + request: Optional[Union[clouddms.DeleteMappingRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single mapping rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMappingRuleRequest( + name="name_value", + ) + + # Make the request + await client.delete_mapping_rule(request=request) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteMappingRuleRequest, dict]]): + The request object. Request message for + 'DeleteMappingRule' request. + name (:class:`str`): + Required. Name of the mapping rule + resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteMappingRuleRequest): + request = clouddms.DeleteMappingRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_mapping_rules(self, + request: Optional[Union[clouddms.ListMappingRulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMappingRulesAsyncPager: + r"""Lists the mapping rules for a specific conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mapping_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListMappingRulesRequest, dict]]): + The request object. Retrieve a list of all mapping rules + in a given conversion workspace. + parent (:class:`str`): + Required. Name of the conversion workspace resource + whose mapping rules are listed in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMappingRulesAsyncPager: + Response message for + 'ListMappingRulesRequest' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListMappingRulesRequest): + request = clouddms.ListMappingRulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMappingRulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_mapping_rule(self, + request: Optional[Union[clouddms.GetMappingRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.MappingRule: + r"""Gets the details of a mapping rule. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMappingRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mapping_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetMappingRuleRequest, dict]]): + The request object. Request message for 'GetMappingRule' + request. + name (:class:`str`): + Required. Name of the mapping rule + resource to get. Example: + conversionWorkspaces/123/mappingRules/rule123 + + In order to retrieve a previous revision + of the mapping rule, also provide the + revision ID. + Example: + + conversionWorkspace/123/mappingRules/rule123@c7cfa2a8c7cfa2a8c7cfa2a8c7cfa2a8 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MappingRule: + Definition of a transformation that + is to be applied to a group of entities + in the source schema. Several such + transformations can be applied to an + entity sequentially to define the + corresponding entity in the target + schema. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetMappingRuleRequest): + request = clouddms.GetMappingRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def seed_conversion_workspace(self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.seed_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def import_mapping_rules(self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + rules_files = clouddms_v1.RulesFile() + rules_files.rules_source_filename = "rules_source_filename_value" + rules_files.rules_content = "rules_content_value" + + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + rules_format="IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE", + rules_files=rules_files, + auto_commit=True, + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.ImportMappingRulesRequest): + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.import_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def convert_conversion_workspace(self, + request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.convert_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def commit_conversion_workspace(self, + request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.commit_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rollback_conversion_workspace(self, + request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.rollback_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def apply_conversion_workspace(self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.apply_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def describe_database_entities(self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesAsyncPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + tree="DESTINATION_TREE", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.describe_database_entities] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.DescribeDatabaseEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_background_jobs(self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.SearchBackgroundJobsRequest): + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.search_background_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def describe_conversion_workspace_revisions(self, + request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DescribeConversionWorkspaceRevisionsRequest): + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.describe_conversion_workspace_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_static_ips(self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsAsyncPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]]): + The request object. Request message for 'FetchStaticIps' + request. + name (:class:`str`): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.FetchStaticIpsRequest): + request = clouddms.FetchStaticIpsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.fetch_static_ips] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchStaticIpsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. 
Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("resource", request.resource),)),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "DataMigrationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataMigrationServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py new file mode 100644 index 000000000000..11d9dab6dd71 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py @@ -0,0 +1,5823 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.clouddms_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from 
.transports.grpc import DataMigrationServiceGrpcTransport +from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport + + +class DataMigrationServiceClientMeta(type): + """Metaclass for the DataMigrationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataMigrationServiceTransport]] + _transport_registry["grpc"] = DataMigrationServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataMigrationServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataMigrationServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataMigrationServiceClient(metaclass=DataMigrationServiceClientMeta): + """Database Migration service""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "datamigration.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "datamigration.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataMigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataMigrationServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def connection_profile_path(project: str,location: str,connection_profile: str,) -> str: + """Returns a fully-qualified connection_profile string.""" + return "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, ) + + @staticmethod + def parse_connection_profile_path(path: str) -> Dict[str,str]: + """Parses a connection_profile path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connectionProfiles/(?P<connection_profile>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def conversion_workspace_path(project: str,location: str,conversion_workspace: str,) -> str: + """Returns a fully-qualified conversion_workspace string.""" + return "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, ) + + @staticmethod + def parse_conversion_workspace_path(path: str) -> Dict[str,str]: + """Parses a conversion_workspace path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/conversionWorkspaces/(?P<conversion_workspace>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def mapping_rule_path(project: str,location: str,conversion_workspace: str,mapping_rule: str,) -> str: + """Returns a fully-qualified mapping_rule string.""" + return
"projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}/mappingRules/{mapping_rule}".format(project=project, location=location, conversion_workspace=conversion_workspace, mapping_rule=mapping_rule, ) + + @staticmethod + def parse_mapping_rule_path(path: str) -> Dict[str,str]: + """Parses a mapping_rule path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/conversionWorkspaces/(?P<conversion_workspace>.+?)/mappingRules/(?P<mapping_rule>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def migration_job_path(project: str,location: str,migration_job: str,) -> str: + """Returns a fully-qualified migration_job string.""" + return "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, ) + + @staticmethod + def parse_migration_job_path(path: str) -> Dict[str,str]: + """Parses a migration_job path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/migrationJobs/(?P<migration_job>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def networks_path(project: str,network: str,) -> str: + """Returns a fully-qualified networks string.""" + return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + + @staticmethod + def parse_networks_path(path: str) -> Dict[str,str]: + """Parses a networks path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def private_connection_path(project: str,location: str,private_connection: str,) -> str: + """Returns a fully-qualified private_connection string.""" + return "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, ) + + @staticmethod + def parse_private_connection_path(path: str) -> Dict[str,str]: + """Parses a
private_connection path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/privateConnections/(?P<private_connection>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" +
return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataMigrationServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DataMigrationServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataMigrationServiceTransport, Callable[..., DataMigrationServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataMigrationServiceTransport,Callable[..., DataMigrationServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataMigrationServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataMigrationServiceClient._read_environment_variables() + self._client_cert_source = DataMigrationServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DataMigrationServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataMigrationServiceTransport) + if transport_provided: + # transport is a DataMigrationServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DataMigrationServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataMigrationServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataMigrationServiceTransport], Callable[..., DataMigrationServiceTransport]] = ( + DataMigrationServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataMigrationServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_migration_jobs(self, + request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationJobsPager: + r"""Lists migration jobs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]): + The request object. Retrieves a list of all migration + jobs in a given project and location. + parent (str): + Required. The parent which owns this + collection of migrationJobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager: + Response message for + 'ListMigrationJobs' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListMigrationJobsRequest): + request = clouddms.ListMigrationJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_migration_job(self, + request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.MigrationJob: + r"""Gets details of a single migration job. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]): + The request object. Request message for 'GetMigrationJob' + request. + name (str): + Required. Name of the migration job + resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MigrationJob: + Represents a Database Migration + Service migration job object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetMigrationJobRequest): + request = clouddms.GetMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_migration_job(self, + request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + migration_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new migration job in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]): + The request object. Request message to create a new + Database Migration Service migration job + in the specified project and region. + parent (str): + Required. The parent which owns this + collection of migration jobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. Represents a `migration + job `__ + object. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job_id (str): + Required. The ID of the instance to + create. + + This corresponds to the ``migration_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_job, migration_job_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreateMigrationJobRequest): + request = clouddms.CreateMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_job is not None: + request.migration_job = migration_job + if migration_job_id is not None: + request.migration_job_id = migration_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_migration_job(self, + request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, + *, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]): + The request object. Request message for + 'UpdateMigrationJob' request. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. The migration job + parameters to update. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([migration_job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateMigrationJobRequest): + request = clouddms.UpdateMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if migration_job is not None: + request.migration_job = migration_job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job.name", request.migration_job.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_migration_job(self, + request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]): + The request object. Request message for + 'DeleteMigrationJob' request. + name (str): + Required. Name of the migration job + resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteMigrationJobRequest): + request = clouddms.DeleteMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_migration_job(self, + request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Start an already created migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]): + The request object. Request message for + 'StartMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.StartMigrationJobRequest): + request = clouddms.StartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def stop_migration_job(self, + request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a running migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]): + The request object. Request message for + 'StopMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.StopMigrationJobRequest): + request = clouddms.StopMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def resume_migration_job(self, + request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]): + The request object. 
Request message for + 'ResumeMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ResumeMigrationJobRequest): + request = clouddms.ResumeMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def promote_migration_job(self, + request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]): + The request object. Request message for + 'PromoteMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.PromoteMigrationJobRequest): + request = clouddms.PromoteMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.promote_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def verify_migration_job(self, + request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]): + The request object. Request message for + 'VerifyMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.VerifyMigrationJobRequest): + request = clouddms.VerifyMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.verify_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def restart_migration_job(self, + request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]): + The request object. Request message for + 'RestartMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.RestartMigrationJobRequest): + request = clouddms.RestartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restart_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_ssh_script(self, + request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SshScript: + r"""Generate a SSH configuration script to configure the + reverse SSH connectivity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = client.generate_ssh_script(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]): + The request object. Request message for + 'GenerateSshScript' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SshScript: + Response message for + 'GenerateSshScript' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GenerateSshScriptRequest): + request = clouddms.GenerateSshScriptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_ssh_script] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job", request.migration_job), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_tcp_proxy_script(self, + request: Optional[Union[clouddms.GenerateTcpProxyScriptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.TcpProxyScript: + r"""Generate a TCP Proxy configuration script to + configure a cloud-hosted VM running a TCP Proxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_generate_tcp_proxy_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GenerateTcpProxyScriptRequest( + vm_name="vm_name_value", + vm_machine_type="vm_machine_type_value", + vm_subnet="vm_subnet_value", + ) + + # Make the request + response = client.generate_tcp_proxy_script(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GenerateTcpProxyScriptRequest, dict]): + The request object. Request message for + 'GenerateTcpProxyScript' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.TcpProxyScript: + Response message for + 'GenerateTcpProxyScript' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GenerateTcpProxyScriptRequest): + request = clouddms.GenerateTcpProxyScriptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_tcp_proxy_script] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job", request.migration_job), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_connection_profiles(self, + request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionProfilesPager: + r"""Retrieves a list of all connection profiles in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]): + The request object. Request message for + 'ListConnectionProfiles' request. + parent (str): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager: + Response message for + 'ListConnectionProfiles' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListConnectionProfilesRequest): + request = clouddms.ListConnectionProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connection_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection_profile(self, + request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.ConnectionProfile: + r"""Gets details of a single connection profile. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]): + The request object. Request message for + 'GetConnectionProfile' request. + name (str): + Required. Name of the connection + profile resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConnectionProfile: + A connection profile definition. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetConnectionProfileRequest): + request = clouddms.GetConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_connection_profile(self, + request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + connection_profile_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new connection profile in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]): + The request object. Request message for + 'CreateConnectionProfile' request. + parent (str): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The create request body + including the connection profile data + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile_id (str): + Required. The connection profile + identifier. 
+ + This corresponds to the ``connection_profile_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection_profile, connection_profile_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreateConnectionProfileRequest): + request = clouddms.CreateConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection_profile is not None: + request.connection_profile = connection_profile + if connection_profile_id is not None: + request.connection_profile_id = connection_profile_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_connection_profile(self, + request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None, + *, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update the configuration of a single connection + profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]): + The request object. Request message for + 'UpdateConnectionProfile' request. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The connection profile + parameters to update. + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection_profile, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateConnectionProfileRequest): + request = clouddms.UpdateConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection_profile is not None: + request.connection_profile = connection_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection_profile.name", request.connection_profile.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_connection_profile(self, + request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]): + The request object. Request message for + 'DeleteConnectionProfile' request. + name (str): + Required. Name of the connection + profile resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteConnectionProfileRequest): + request = clouddms.DeleteConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_private_connection(self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (str): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (str): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreatePrivateConnectionRequest): + request = clouddms.CreatePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_private_connection(self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]): + The request object. Request message to get a private + connection resource. + name (str): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetPrivateConnectionRequest): + request = clouddms.GetPrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_private_connections(self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (str): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListPrivateConnectionsRequest): + request = clouddms.ListPrivateConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_private_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPrivateConnectionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_private_connection(self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]): + The request object. Request message to delete a private + connection. + name (str): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeletePrivateConnectionRequest): + request = clouddms.DeletePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_conversion_workspace(self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetConversionWorkspaceRequest): + request = clouddms.GetConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_conversion_workspaces(self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListConversionWorkspacesRequest): + request = clouddms.ListConversionWorkspacesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_conversion_workspaces] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConversionWorkspacesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_conversion_workspace(self, + request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]): + The request 
object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (str): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): + request = clouddms.CreateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_conversion_workspace(self, + request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, + *, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): + request = clouddms.UpdateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace.name", request.conversion_workspace.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_conversion_workspace(self, + request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): + request = clouddms.DeleteConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_mapping_rule(self, + request: Optional[Union[clouddms.CreateMappingRuleRequest, dict]] = None, + *, + parent: Optional[str] = None, + mapping_rule: Optional[conversionworkspace_resources.MappingRule] = None, + mapping_rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.MappingRule: + r"""Creates a new mapping rule for a given conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + mapping_rule = clouddms_v1.MappingRule() + mapping_rule.single_entity_rename.new_name = "new_name_value" + mapping_rule.rule_scope = "DATABASE_ENTITY_TYPE_DATABASE" + mapping_rule.rule_order = 1075 + + request = clouddms_v1.CreateMappingRuleRequest( + parent="parent_value", + mapping_rule_id="mapping_rule_id_value", + mapping_rule=mapping_rule, + ) + + # Make the request + response = client.create_mapping_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateMappingRuleRequest, dict]): + The request object. Request message for + 'CreateMappingRule' command. + parent (str): + Required. The parent which owns this + collection of mapping rules. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mapping_rule (google.cloud.clouddms_v1.types.MappingRule): + Required. Represents a [mapping rule] + (https://cloud.google.com/database-migration/reference/rest/v1/projects.locations.mappingRules) + object. + + This corresponds to the ``mapping_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mapping_rule_id (str): + Required. The ID of the rule to + create. + + This corresponds to the ``mapping_rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MappingRule: + Definition of a transformation that + is to be applied to a group of entities + in the source schema. Several such + transformations can be applied to an + entity sequentially to define the + corresponding entity in the target + schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, mapping_rule, mapping_rule_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CreateMappingRuleRequest): + request = clouddms.CreateMappingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mapping_rule is not None: + request.mapping_rule = mapping_rule + if mapping_rule_id is not None: + request.mapping_rule_id = mapping_rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_mapping_rule(self, + request: Optional[Union[clouddms.DeleteMappingRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single mapping rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMappingRuleRequest( + name="name_value", + ) + + # Make the request + client.delete_mapping_rule(request=request) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteMappingRuleRequest, dict]): + The request object. Request message for + 'DeleteMappingRule' request. + name (str): + Required. Name of the mapping rule + resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DeleteMappingRuleRequest): + request = clouddms.DeleteMappingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_mapping_rules(self, + request: Optional[Union[clouddms.ListMappingRulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMappingRulesPager: + r"""Lists the mapping rules for a specific conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mapping_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListMappingRulesRequest, dict]): + The request object. Retrieve a list of all mapping rules + in a given conversion workspace. + parent (str): + Required. Name of the conversion workspace resource + whose mapping rules are listed in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMappingRulesPager: + Response message for + 'ListMappingRulesRequest' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ListMappingRulesRequest): + request = clouddms.ListMappingRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMappingRulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_mapping_rule(self, + request: Optional[Union[clouddms.GetMappingRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.MappingRule: + r"""Gets the details of a mapping rule. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMappingRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_mapping_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetMappingRuleRequest, dict]): + The request object. Request message for 'GetMappingRule' + request. + name (str): + Required. Name of the mapping rule + resource to get. Example: + conversionWorkspaces/123/mappingRules/rule123 + + In order to retrieve a previous revision + of the mapping rule, also provide the + revision ID. + Example: + + conversionWorkspace/123/mappingRules/rule123@c7cfa2a8c7cfa2a8c7cfa2a8c7cfa2a8 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MappingRule: + Definition of a transformation that + is to be applied to a group of entities + in the source schema. Several such + transformations can be applied to an + entity sequentially to define the + corresponding entity in the target + schema. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.GetMappingRuleRequest): + request = clouddms.GetMappingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_mapping_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def seed_conversion_workspace(self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.seed_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_mapping_rules(self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + rules_files = clouddms_v1.RulesFile() + rules_files.rules_source_filename = "rules_source_filename_value" + rules_files.rules_content = "rules_content_value" + + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + rules_format="IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE", + rules_files=rules_files, + auto_commit=True, + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, clouddms.ImportMappingRulesRequest): + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def convert_conversion_workspace(self, + request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.convert_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def commit_conversion_workspace(self, + request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def rollback_conversion_workspace(self, + request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def apply_conversion_workspace(self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.apply_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def describe_database_entities(self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + tree="DESTINATION_TREE", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.describe_database_entities] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.DescribeDatabaseEntitiesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_background_jobs(self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.SearchBackgroundJobsRequest): + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_background_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def describe_conversion_workspace_revisions(self, + request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.DescribeConversionWorkspaceRevisionsRequest): + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.describe_conversion_workspace_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_static_ips(self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]): + The request object. Request message for 'FetchStaticIps' + request. + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, clouddms.FetchStaticIpsRequest): + request = clouddms.FetchStaticIpsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_static_ips] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchStaticIpsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataMigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. 
+ + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataMigrationServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py new file mode 100644 index 000000000000..3b65b3939c40 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py @@ -0,0 +1,974 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources + + +class ListMigrationJobsPager: + """A pager for iterating through ``list_migration_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationJobs`` requests and continue to iterate + through the ``migration_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.ListMigrationJobsResponse], + request: clouddms.ListMigrationJobsRequest, + response: clouddms.ListMigrationJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListMigrationJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListMigrationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.MigrationJob]: + for page in self.pages: + yield from page.migration_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMigrationJobsAsyncPager: + """A pager for iterating through ``list_migration_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationJobs`` requests and continue to iterate + through the ``migration_jobs`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListMigrationJobsResponse]], + request: clouddms.ListMigrationJobsRequest, + response: clouddms.ListMigrationJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListMigrationJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListMigrationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.MigrationJob]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionProfilesPager: + """A pager for iterating through ``list_connection_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connection_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnectionProfiles`` requests and continue to iterate + through the ``connection_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.ListConnectionProfilesResponse], + request: clouddms.ListConnectionProfilesRequest, + response: clouddms.ListConnectionProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConnectionProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConnectionProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.ConnectionProfile]: + for page in self.pages: + yield from page.connection_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionProfilesAsyncPager: + """A pager for iterating through ``list_connection_profiles`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connection_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnectionProfiles`` requests and continue to iterate + through the ``connection_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListConnectionProfilesResponse]], + request: clouddms.ListConnectionProfilesRequest, + response: clouddms.ListConnectionProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListConnectionProfilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConnectionProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.ConnectionProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.connection_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.ListPrivateConnectionsResponse], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.PrivateConnection]: + for page in self.pages: + yield from page.private_connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsAsyncPager: + """A pager for iterating through ``list_private_connections`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListPrivateConnectionsResponse]], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.PrivateConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.private_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``conversion_workspaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.ListConversionWorkspacesResponse], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.ConversionWorkspace]: + for page in self.pages: + yield from page.conversion_workspaces + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesAsyncPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``conversion_workspaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListConversionWorkspacesResponse]], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.ConversionWorkspace]: + async def async_generator(): + async for page in self.pages: + for response in page.conversion_workspaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMappingRulesPager: + """A pager for iterating through ``list_mapping_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMappingRulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``mapping_rules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMappingRules`` requests and continue to iterate + through the ``mapping_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListMappingRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.ListMappingRulesResponse], + request: clouddms.ListMappingRulesRequest, + response: clouddms.ListMappingRulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMappingRulesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMappingRulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListMappingRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListMappingRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.MappingRule]: + for page in self.pages: + yield from page.mapping_rules + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMappingRulesAsyncPager: + """A pager for iterating through ``list_mapping_rules`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMappingRulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``mapping_rules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMappingRules`` requests and continue to iterate + through the ``mapping_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListMappingRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListMappingRulesResponse]], + request: clouddms.ListMappingRulesRequest, + response: clouddms.ListMappingRulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMappingRulesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMappingRulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListMappingRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListMappingRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.MappingRule]: + async def async_generator(): + async for page in self.pages: + for response in page.mapping_rules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.DescribeDatabaseEntitiesResponse], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.DatabaseEntity]: + for page in self.pages: + yield from page.database_entities + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesAsyncPager: + """A pager for iterating through ``describe_database_entities`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.DescribeDatabaseEntitiesResponse]], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.DatabaseEntity]: + async def async_generator(): + async for page in self.pages: + for response in page.database_entities: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchStaticIpsPager: + """A pager for iterating through ``fetch_static_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., clouddms.FetchStaticIpsResponse], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.static_ips + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchStaticIpsAsyncPager: + """A pager for iterating through ``fetch_static_ips`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.FetchStaticIpsResponse]], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.static_ips: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/README.rst b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/README.rst new file mode 100644 index 000000000000..9d82094a0c0f --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataMigrationServiceTransport` is the ABC for all transports. +- public child `DataMigrationServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataMigrationServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataMigrationServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `DataMigrationServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py new file mode 100644 index 000000000000..315bde82a2ca --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataMigrationServiceTransport +from .grpc import DataMigrationServiceGrpcTransport +from .grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DataMigrationServiceTransport]] +_transport_registry['grpc'] = DataMigrationServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataMigrationServiceGrpcAsyncIOTransport + +__all__ = ( + 'DataMigrationServiceTransport', + 'DataMigrationServiceGrpcTransport', + 'DataMigrationServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py new file mode 100644 index 000000000000..d8c23a8bf5e2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py @@ -0,0 +1,854 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.clouddms_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataMigrationServiceTransport(abc.ABC): + """Abstract transport class for DataMigrationService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datamigration.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'datamigration.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_migration_jobs: gapic_v1.method.wrap_method( + self.list_migration_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_job: gapic_v1.method.wrap_method( + self.get_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_migration_job: gapic_v1.method.wrap_method( + self.create_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.update_migration_job: gapic_v1.method.wrap_method( + self.update_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_migration_job: gapic_v1.method.wrap_method( + self.delete_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.start_migration_job: gapic_v1.method.wrap_method( + self.start_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.stop_migration_job: gapic_v1.method.wrap_method( + self.stop_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.resume_migration_job: gapic_v1.method.wrap_method( + self.resume_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.promote_migration_job: 
gapic_v1.method.wrap_method( + self.promote_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.verify_migration_job: gapic_v1.method.wrap_method( + self.verify_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.restart_migration_job: gapic_v1.method.wrap_method( + self.restart_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_ssh_script: gapic_v1.method.wrap_method( + self.generate_ssh_script, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_tcp_proxy_script: gapic_v1.method.wrap_method( + self.generate_tcp_proxy_script, + default_timeout=None, + client_info=client_info, + ), + self.list_connection_profiles: gapic_v1.method.wrap_method( + self.list_connection_profiles, + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection_profile: gapic_v1.method.wrap_method( + self.get_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_connection_profile: gapic_v1.method.wrap_method( + self.create_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection_profile: gapic_v1.method.wrap_method( + self.update_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection_profile: gapic_v1.method.wrap_method( + self.delete_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_private_connection: gapic_v1.method.wrap_method( + self.create_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_private_connection: gapic_v1.method.wrap_method( + self.get_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.list_private_connections: gapic_v1.method.wrap_method( + self.list_private_connections, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_private_connection: gapic_v1.method.wrap_method( + 
self.delete_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_conversion_workspace: gapic_v1.method.wrap_method( + self.get_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_conversion_workspaces: gapic_v1.method.wrap_method( + self.list_conversion_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.create_conversion_workspace: gapic_v1.method.wrap_method( + self.create_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.update_conversion_workspace: gapic_v1.method.wrap_method( + self.update_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_conversion_workspace: gapic_v1.method.wrap_method( + self.delete_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.create_mapping_rule: gapic_v1.method.wrap_method( + self.create_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_mapping_rule: gapic_v1.method.wrap_method( + self.delete_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_mapping_rules: gapic_v1.method.wrap_method( + self.list_mapping_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_mapping_rule: gapic_v1.method.wrap_method( + self.get_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.seed_conversion_workspace: gapic_v1.method.wrap_method( + self.seed_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.import_mapping_rules: gapic_v1.method.wrap_method( + self.import_mapping_rules, + default_timeout=60.0, + client_info=client_info, + ), + self.convert_conversion_workspace: gapic_v1.method.wrap_method( + self.convert_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.commit_conversion_workspace: gapic_v1.method.wrap_method( + self.commit_conversion_workspace, + 
default_timeout=60.0, + client_info=client_info, + ), + self.rollback_conversion_workspace: gapic_v1.method.wrap_method( + self.rollback_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.apply_conversion_workspace: gapic_v1.method.wrap_method( + self.apply_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_database_entities: gapic_v1.method.wrap_method( + self.describe_database_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.search_background_jobs: gapic_v1.method.wrap_method( + self.search_background_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_conversion_workspace_revisions: gapic_v1.method.wrap_method( + self.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_static_ips: gapic_v1.method.wrap_method( + self.fetch_static_ips, + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + 
default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + Union[ + clouddms.ListMigrationJobsResponse, + Awaitable[clouddms.ListMigrationJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + Union[ + clouddms_resources.MigrationJob, + Awaitable[clouddms_resources.MigrationJob] + ]]: + raise NotImplementedError() + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + Union[ + 
operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + Union[ + clouddms.SshScript, + Awaitable[clouddms.SshScript] + ]]: + raise NotImplementedError() + + @property + def generate_tcp_proxy_script(self) -> Callable[ + [clouddms.GenerateTcpProxyScriptRequest], + Union[ + clouddms.TcpProxyScript, + Awaitable[clouddms.TcpProxyScript] + ]]: + raise NotImplementedError() + + @property + def list_connection_profiles(self) -> Callable[ + [clouddms.ListConnectionProfilesRequest], + Union[ + clouddms.ListConnectionProfilesResponse, + Awaitable[clouddms.ListConnectionProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + Union[ + clouddms_resources.ConnectionProfile, + Awaitable[clouddms_resources.ConnectionProfile] + ]]: + raise NotImplementedError() + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + Union[ + operations_pb2.Operation, + 
Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Union[ + clouddms_resources.PrivateConnection, + Awaitable[clouddms_resources.PrivateConnection] + ]]: + raise NotImplementedError() + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Union[ + clouddms.ListPrivateConnectionsResponse, + Awaitable[clouddms.ListPrivateConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Union[ + conversionworkspace_resources.ConversionWorkspace, + Awaitable[conversionworkspace_resources.ConversionWorkspace] + ]]: + raise NotImplementedError() + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Union[ + clouddms.ListConversionWorkspacesResponse, + Awaitable[clouddms.ListConversionWorkspacesResponse] + ]]: + raise NotImplementedError() + + @property + def 
create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def create_mapping_rule(self) -> Callable[ + [clouddms.CreateMappingRuleRequest], + Union[ + conversionworkspace_resources.MappingRule, + Awaitable[conversionworkspace_resources.MappingRule] + ]]: + raise NotImplementedError() + + @property + def delete_mapping_rule(self) -> Callable[ + [clouddms.DeleteMappingRuleRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_mapping_rules(self) -> Callable[ + [clouddms.ListMappingRulesRequest], + Union[ + clouddms.ListMappingRulesResponse, + Awaitable[clouddms.ListMappingRulesResponse] + ]]: + raise NotImplementedError() + + @property + def get_mapping_rule(self) -> Callable[ + [clouddms.GetMappingRuleRequest], + Union[ + conversionworkspace_resources.MappingRule, + Awaitable[conversionworkspace_resources.MappingRule] + ]]: + raise NotImplementedError() + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + 
def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Union[ + clouddms.DescribeDatabaseEntitiesResponse, + Awaitable[clouddms.DescribeDatabaseEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Union[ + clouddms.SearchBackgroundJobsResponse, + Awaitable[clouddms.SearchBackgroundJobsResponse] + ]]: + raise NotImplementedError() + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Union[ + clouddms.DescribeConversionWorkspaceRevisionsResponse, + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse] + ]]: + raise NotImplementedError() + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Union[ + clouddms.FetchStaticIpsResponse, + Awaitable[clouddms.FetchStaticIpsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + 
[operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataMigrationServiceTransport', +) diff --git 
a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py new file mode 100644 index 000000000000..d3bcb357785c --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py @@ -0,0 +1,1569 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO + + +class DataMigrationServiceGrpcTransport(DataMigrationServiceTransport): + """gRPC backend transport for DataMigrationService. + + Database Migration service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'datamigration.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be 
used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + clouddms.ListMigrationJobsResponse]: + r"""Return a callable for the list migration jobs method over gRPC. + + Lists migration jobs in a given project and location. + + Returns: + Callable[[~.ListMigrationJobsRequest], + ~.ListMigrationJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_migration_jobs' not in self._stubs: + self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', + request_serializer=clouddms.ListMigrationJobsRequest.serialize, + response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, + ) + return self._stubs['list_migration_jobs'] + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + clouddms_resources.MigrationJob]: + r"""Return a callable for the get migration job method over gRPC. + + Gets details of a single migration job. + + Returns: + Callable[[~.GetMigrationJobRequest], + ~.MigrationJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_migration_job' not in self._stubs: + self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', + request_serializer=clouddms.GetMigrationJobRequest.serialize, + response_deserializer=clouddms_resources.MigrationJob.deserialize, + ) + return self._stubs['get_migration_job'] + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the create migration job method over gRPC. + + Creates a new migration job in a given project and + location. + + Returns: + Callable[[~.CreateMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_migration_job' not in self._stubs: + self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', + request_serializer=clouddms.CreateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_migration_job'] + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the update migration job method over gRPC. + + Updates the parameters of a single migration job. + + Returns: + Callable[[~.UpdateMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_migration_job' not in self._stubs: + self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', + request_serializer=clouddms.UpdateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_migration_job'] + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete migration job method over gRPC. + + Deletes a single migration job. + + Returns: + Callable[[~.DeleteMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_migration_job' not in self._stubs: + self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', + request_serializer=clouddms.DeleteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_migration_job'] + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the start migration job method over gRPC. + + Start an already created migration job. + + Returns: + Callable[[~.StartMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'start_migration_job' not in self._stubs: + self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', + request_serializer=clouddms.StartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['start_migration_job'] + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the stop migration job method over gRPC. + + Stops a running migration job. + + Returns: + Callable[[~.StopMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'stop_migration_job' not in self._stubs: + self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', + request_serializer=clouddms.StopMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['stop_migration_job'] + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the resume migration job method over gRPC. + + Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + Returns: + Callable[[~.ResumeMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_migration_job' not in self._stubs: + self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', + request_serializer=clouddms.ResumeMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['resume_migration_job'] + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the promote migration job method over gRPC. + + Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + Returns: + Callable[[~.PromoteMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'promote_migration_job' not in self._stubs: + self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', + request_serializer=clouddms.PromoteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['promote_migration_job'] + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the verify migration job method over gRPC. + + Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + Returns: + Callable[[~.VerifyMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'verify_migration_job' not in self._stubs: + self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', + request_serializer=clouddms.VerifyMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['verify_migration_job'] + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the restart migration job method over gRPC. + + Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. 
+ + Returns: + Callable[[~.RestartMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restart_migration_job' not in self._stubs: + self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', + request_serializer=clouddms.RestartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restart_migration_job'] + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + clouddms.SshScript]: + r"""Return a callable for the generate ssh script method over gRPC. + + Generate a SSH configuration script to configure the + reverse SSH connectivity. + + Returns: + Callable[[~.GenerateSshScriptRequest], + ~.SshScript]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_ssh_script' not in self._stubs: + self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript', + request_serializer=clouddms.GenerateSshScriptRequest.serialize, + response_deserializer=clouddms.SshScript.deserialize, + ) + return self._stubs['generate_ssh_script'] + + @property + def generate_tcp_proxy_script(self) -> Callable[ + [clouddms.GenerateTcpProxyScriptRequest], + clouddms.TcpProxyScript]: + r"""Return a callable for the generate tcp proxy script method over gRPC. 
+ + Generate a TCP Proxy configuration script to + configure a cloud-hosted VM running a TCP Proxy. + + Returns: + Callable[[~.GenerateTcpProxyScriptRequest], + ~.TcpProxyScript]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_tcp_proxy_script' not in self._stubs: + self._stubs['generate_tcp_proxy_script'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GenerateTcpProxyScript', + request_serializer=clouddms.GenerateTcpProxyScriptRequest.serialize, + response_deserializer=clouddms.TcpProxyScript.deserialize, + ) + return self._stubs['generate_tcp_proxy_script'] + + @property + def list_connection_profiles(self) -> Callable[ + [clouddms.ListConnectionProfilesRequest], + clouddms.ListConnectionProfilesResponse]: + r"""Return a callable for the list connection profiles method over gRPC. + + Retrieves a list of all connection profiles in a + given project and location. + + Returns: + Callable[[~.ListConnectionProfilesRequest], + ~.ListConnectionProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_connection_profiles' not in self._stubs: + self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', + request_serializer=clouddms.ListConnectionProfilesRequest.serialize, + response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, + ) + return self._stubs['list_connection_profiles'] + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + clouddms_resources.ConnectionProfile]: + r"""Return a callable for the get connection profile method over gRPC. + + Gets details of a single connection profile. + + Returns: + Callable[[~.GetConnectionProfileRequest], + ~.ConnectionProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_connection_profile' not in self._stubs: + self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', + request_serializer=clouddms.GetConnectionProfileRequest.serialize, + response_deserializer=clouddms_resources.ConnectionProfile.deserialize, + ) + return self._stubs['get_connection_profile'] + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the create connection profile method over gRPC. + + Creates a new connection profile in a given project + and location. + + Returns: + Callable[[~.CreateConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection_profile' not in self._stubs: + self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', + request_serializer=clouddms.CreateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection_profile'] + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the update connection profile method over gRPC. + + Update the configuration of a single connection + profile. + + Returns: + Callable[[~.UpdateConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_connection_profile' not in self._stubs: + self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', + request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection_profile'] + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete connection profile method over gRPC. + + Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. 
+ + Returns: + Callable[[~.DeleteConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection_profile' not in self._stubs: + self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', + request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection_profile'] + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_private_connection' not in self._stubs: + self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_private_connection'] + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + clouddms_resources.PrivateConnection]: + r"""Return a callable for the get private connection method over gRPC. 
+ + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + ~.PrivateConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_private_connection' not in self._stubs: + self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs['get_private_connection'] + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + clouddms.ListPrivateConnectionsResponse]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + ~.ListPrivateConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_private_connections' not in self._stubs: + self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs['list_private_connections'] + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_private_connection' not in self._stubs: + self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_private_connection'] + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + conversionworkspace_resources.ConversionWorkspace]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + ~.ConversionWorkspace]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_conversion_workspace' not in self._stubs: + self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs['get_conversion_workspace'] + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + clouddms.ListConversionWorkspacesResponse]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + ~.ListConversionWorkspacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_conversion_workspaces' not in self._stubs: + self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs['list_conversion_workspaces'] + + @property + def create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. 
+ + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_conversion_workspace' not in self._stubs: + self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_conversion_workspace'] + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_conversion_workspace' not in self._stubs: + self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_conversion_workspace'] + + @property + def delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_conversion_workspace' not in self._stubs: + self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_conversion_workspace'] + + @property + def create_mapping_rule(self) -> Callable[ + [clouddms.CreateMappingRuleRequest], + conversionworkspace_resources.MappingRule]: + r"""Return a callable for the create mapping rule method over gRPC. + + Creates a new mapping rule for a given conversion + workspace. + + Returns: + Callable[[~.CreateMappingRuleRequest], + ~.MappingRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_mapping_rule' not in self._stubs: + self._stubs['create_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMappingRule', + request_serializer=clouddms.CreateMappingRuleRequest.serialize, + response_deserializer=conversionworkspace_resources.MappingRule.deserialize, + ) + return self._stubs['create_mapping_rule'] + + @property + def delete_mapping_rule(self) -> Callable[ + [clouddms.DeleteMappingRuleRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete mapping rule method over gRPC. + + Deletes a single mapping rule. + + Returns: + Callable[[~.DeleteMappingRuleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_mapping_rule' not in self._stubs: + self._stubs['delete_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMappingRule', + request_serializer=clouddms.DeleteMappingRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_mapping_rule'] + + @property + def list_mapping_rules(self) -> Callable[ + [clouddms.ListMappingRulesRequest], + clouddms.ListMappingRulesResponse]: + r"""Return a callable for the list mapping rules method over gRPC. + + Lists the mapping rules for a specific conversion + workspace. + + Returns: + Callable[[~.ListMappingRulesRequest], + ~.ListMappingRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_mapping_rules' not in self._stubs: + self._stubs['list_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMappingRules', + request_serializer=clouddms.ListMappingRulesRequest.serialize, + response_deserializer=clouddms.ListMappingRulesResponse.deserialize, + ) + return self._stubs['list_mapping_rules'] + + @property + def get_mapping_rule(self) -> Callable[ + [clouddms.GetMappingRuleRequest], + conversionworkspace_resources.MappingRule]: + r"""Return a callable for the get mapping rule method over gRPC. + + Gets the details of a mapping rule. + + Returns: + Callable[[~.GetMappingRuleRequest], + ~.MappingRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_mapping_rule' not in self._stubs: + self._stubs['get_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMappingRule', + request_serializer=clouddms.GetMappingRuleRequest.serialize, + response_deserializer=conversionworkspace_resources.MappingRule.deserialize, + ) + return self._stubs['get_mapping_rule'] + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'seed_conversion_workspace' not in self._stubs: + self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['seed_conversion_workspace'] + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + operations_pb2.Operation]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_mapping_rules' not in self._stubs: + self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_mapping_rules'] + + @property + def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'convert_conversion_workspace' not in self._stubs: + self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['convert_conversion_workspace'] + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit_conversion_workspace' not in self._stubs: + self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['commit_conversion_workspace'] + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. 
+ + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback_conversion_workspace' not in self._stubs: + self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['rollback_conversion_workspace'] + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'apply_conversion_workspace' not in self._stubs: + self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['apply_conversion_workspace'] + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + clouddms.DescribeDatabaseEntitiesResponse]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + ~.DescribeDatabaseEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'describe_database_entities' not in self._stubs: + self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs['describe_database_entities'] + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + clouddms.SearchBackgroundJobsResponse]: + r"""Return a callable for the search background jobs method over gRPC. 
+ + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + ~.SearchBackgroundJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_background_jobs' not in self._stubs: + self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs['search_background_jobs'] + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + clouddms.DescribeConversionWorkspaceRevisionsResponse]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + ~.DescribeConversionWorkspaceRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'describe_conversion_workspace_revisions' not in self._stubs: + self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs['describe_conversion_workspace_revisions'] + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + clouddms.FetchStaticIpsResponse]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + ~.FetchStaticIpsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_static_ips' not in self._stubs: + self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs['fetch_static_ips'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataMigrationServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4b48c5ea3540 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py @@ -0,0 +1,1835 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DataMigrationServiceGrpcTransport + + +class DataMigrationServiceGrpcAsyncIOTransport(DataMigrationServiceTransport): + """gRPC AsyncIO backend transport for DataMigrationService. + + Database Migration service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'datamigration.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + Awaitable[clouddms.ListMigrationJobsResponse]]: + r"""Return a callable for the list migration jobs method over gRPC. + + Lists migration jobs in a given project and location. + + Returns: + Callable[[~.ListMigrationJobsRequest], + Awaitable[~.ListMigrationJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_migration_jobs' not in self._stubs: + self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', + request_serializer=clouddms.ListMigrationJobsRequest.serialize, + response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, + ) + return self._stubs['list_migration_jobs'] + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + Awaitable[clouddms_resources.MigrationJob]]: + r"""Return a callable for the get migration job method over gRPC. + + Gets details of a single migration job. + + Returns: + Callable[[~.GetMigrationJobRequest], + Awaitable[~.MigrationJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_migration_job' not in self._stubs: + self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', + request_serializer=clouddms.GetMigrationJobRequest.serialize, + response_deserializer=clouddms_resources.MigrationJob.deserialize, + ) + return self._stubs['get_migration_job'] + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create migration job method over gRPC. + + Creates a new migration job in a given project and + location. + + Returns: + Callable[[~.CreateMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_migration_job' not in self._stubs: + self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', + request_serializer=clouddms.CreateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_migration_job'] + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update migration job method over gRPC. + + Updates the parameters of a single migration job. + + Returns: + Callable[[~.UpdateMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_migration_job' not in self._stubs: + self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', + request_serializer=clouddms.UpdateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_migration_job'] + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete migration job method over gRPC. + + Deletes a single migration job. + + Returns: + Callable[[~.DeleteMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_migration_job' not in self._stubs: + self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', + request_serializer=clouddms.DeleteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_migration_job'] + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the start migration job method over gRPC. + + Start an already created migration job. + + Returns: + Callable[[~.StartMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'start_migration_job' not in self._stubs: + self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', + request_serializer=clouddms.StartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['start_migration_job'] + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the stop migration job method over gRPC. + + Stops a running migration job. + + Returns: + Callable[[~.StopMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'stop_migration_job' not in self._stubs: + self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', + request_serializer=clouddms.StopMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['stop_migration_job'] + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the resume migration job method over gRPC. + + Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + Returns: + Callable[[~.ResumeMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_migration_job' not in self._stubs: + self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', + request_serializer=clouddms.ResumeMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['resume_migration_job'] + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the promote migration job method over gRPC. + + Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. 
+ + Returns: + Callable[[~.PromoteMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'promote_migration_job' not in self._stubs: + self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', + request_serializer=clouddms.PromoteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['promote_migration_job'] + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the verify migration job method over gRPC. + + Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + Returns: + Callable[[~.VerifyMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'verify_migration_job' not in self._stubs: + self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', + request_serializer=clouddms.VerifyMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['verify_migration_job'] + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restart migration job method over gRPC. + + Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + Returns: + Callable[[~.RestartMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restart_migration_job' not in self._stubs: + self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', + request_serializer=clouddms.RestartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restart_migration_job'] + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + Awaitable[clouddms.SshScript]]: + r"""Return a callable for the generate ssh script method over gRPC. + + Generate a SSH configuration script to configure the + reverse SSH connectivity. + + Returns: + Callable[[~.GenerateSshScriptRequest], + Awaitable[~.SshScript]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_ssh_script' not in self._stubs: + self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript', + request_serializer=clouddms.GenerateSshScriptRequest.serialize, + response_deserializer=clouddms.SshScript.deserialize, + ) + return self._stubs['generate_ssh_script'] + + @property + def generate_tcp_proxy_script(self) -> Callable[ + [clouddms.GenerateTcpProxyScriptRequest], + Awaitable[clouddms.TcpProxyScript]]: + r"""Return a callable for the generate tcp proxy script method over gRPC. + + Generate a TCP Proxy configuration script to + configure a cloud-hosted VM running a TCP Proxy. + + Returns: + Callable[[~.GenerateTcpProxyScriptRequest], + Awaitable[~.TcpProxyScript]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_tcp_proxy_script' not in self._stubs: + self._stubs['generate_tcp_proxy_script'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GenerateTcpProxyScript', + request_serializer=clouddms.GenerateTcpProxyScriptRequest.serialize, + response_deserializer=clouddms.TcpProxyScript.deserialize, + ) + return self._stubs['generate_tcp_proxy_script'] + + @property + def list_connection_profiles(self) -> Callable[ + [clouddms.ListConnectionProfilesRequest], + Awaitable[clouddms.ListConnectionProfilesResponse]]: + r"""Return a callable for the list connection profiles method over gRPC. + + Retrieves a list of all connection profiles in a + given project and location. 
+ + Returns: + Callable[[~.ListConnectionProfilesRequest], + Awaitable[~.ListConnectionProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connection_profiles' not in self._stubs: + self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', + request_serializer=clouddms.ListConnectionProfilesRequest.serialize, + response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, + ) + return self._stubs['list_connection_profiles'] + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + Awaitable[clouddms_resources.ConnectionProfile]]: + r"""Return a callable for the get connection profile method over gRPC. + + Gets details of a single connection profile. + + Returns: + Callable[[~.GetConnectionProfileRequest], + Awaitable[~.ConnectionProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection_profile' not in self._stubs: + self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', + request_serializer=clouddms.GetConnectionProfileRequest.serialize, + response_deserializer=clouddms_resources.ConnectionProfile.deserialize, + ) + return self._stubs['get_connection_profile'] + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create connection profile method over gRPC. + + Creates a new connection profile in a given project + and location. + + Returns: + Callable[[~.CreateConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection_profile' not in self._stubs: + self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', + request_serializer=clouddms.CreateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection_profile'] + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update connection profile method over gRPC. + + Update the configuration of a single connection + profile. + + Returns: + Callable[[~.UpdateConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_connection_profile' not in self._stubs: + self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', + request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection_profile'] + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete connection profile method over gRPC. + + Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. + + Returns: + Callable[[~.DeleteConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection_profile' not in self._stubs: + self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', + request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection_profile'] + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. 
+ + Returns: + Callable[[~.CreatePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_private_connection' not in self._stubs: + self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_private_connection'] + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Awaitable[clouddms_resources.PrivateConnection]]: + r"""Return a callable for the get private connection method over gRPC. + + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + Awaitable[~.PrivateConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_private_connection' not in self._stubs: + self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs['get_private_connection'] + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Awaitable[clouddms.ListPrivateConnectionsResponse]]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + Awaitable[~.ListPrivateConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_private_connections' not in self._stubs: + self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs['list_private_connections'] + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_private_connection' not in self._stubs: + self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_private_connection'] + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Awaitable[conversionworkspace_resources.ConversionWorkspace]]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + Awaitable[~.ConversionWorkspace]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_conversion_workspace' not in self._stubs: + self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs['get_conversion_workspace'] + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Awaitable[clouddms.ListConversionWorkspacesResponse]]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. 
+ + Returns: + Callable[[~.ListConversionWorkspacesRequest], + Awaitable[~.ListConversionWorkspacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_conversion_workspaces' not in self._stubs: + self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs['list_conversion_workspaces'] + + @property + def create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_conversion_workspace' not in self._stubs: + self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_conversion_workspace'] + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_conversion_workspace' not in self._stubs: + self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_conversion_workspace'] + + @property + def delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_conversion_workspace' not in self._stubs: + self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_conversion_workspace'] + + @property + def create_mapping_rule(self) -> Callable[ + [clouddms.CreateMappingRuleRequest], + Awaitable[conversionworkspace_resources.MappingRule]]: + r"""Return a callable for the create mapping rule method over gRPC. + + Creates a new mapping rule for a given conversion + workspace. + + Returns: + Callable[[~.CreateMappingRuleRequest], + Awaitable[~.MappingRule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_mapping_rule' not in self._stubs: + self._stubs['create_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMappingRule', + request_serializer=clouddms.CreateMappingRuleRequest.serialize, + response_deserializer=conversionworkspace_resources.MappingRule.deserialize, + ) + return self._stubs['create_mapping_rule'] + + @property + def delete_mapping_rule(self) -> Callable[ + [clouddms.DeleteMappingRuleRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete mapping rule method over gRPC. + + Deletes a single mapping rule. + + Returns: + Callable[[~.DeleteMappingRuleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_mapping_rule' not in self._stubs: + self._stubs['delete_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMappingRule', + request_serializer=clouddms.DeleteMappingRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_mapping_rule'] + + @property + def list_mapping_rules(self) -> Callable[ + [clouddms.ListMappingRulesRequest], + Awaitable[clouddms.ListMappingRulesResponse]]: + r"""Return a callable for the list mapping rules method over gRPC. + + Lists the mapping rules for a specific conversion + workspace. + + Returns: + Callable[[~.ListMappingRulesRequest], + Awaitable[~.ListMappingRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_mapping_rules' not in self._stubs: + self._stubs['list_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMappingRules', + request_serializer=clouddms.ListMappingRulesRequest.serialize, + response_deserializer=clouddms.ListMappingRulesResponse.deserialize, + ) + return self._stubs['list_mapping_rules'] + + @property + def get_mapping_rule(self) -> Callable[ + [clouddms.GetMappingRuleRequest], + Awaitable[conversionworkspace_resources.MappingRule]]: + r"""Return a callable for the get mapping rule method over gRPC. + + Gets the details of a mapping rule. + + Returns: + Callable[[~.GetMappingRuleRequest], + Awaitable[~.MappingRule]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_mapping_rule' not in self._stubs: + self._stubs['get_mapping_rule'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMappingRule', + request_serializer=clouddms.GetMappingRuleRequest.serialize, + response_deserializer=conversionworkspace_resources.MappingRule.deserialize, + ) + return self._stubs['get_mapping_rule'] + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'seed_conversion_workspace' not in self._stubs: + self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['seed_conversion_workspace'] + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. 
+ + Returns: + Callable[[~.ImportMappingRulesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_mapping_rules' not in self._stubs: + self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_mapping_rules'] + + @property + def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'convert_conversion_workspace' not in self._stubs: + self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['convert_conversion_workspace'] + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit_conversion_workspace' not in self._stubs: + self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['commit_conversion_workspace'] + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback_conversion_workspace' not in self._stubs: + self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['rollback_conversion_workspace'] + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'apply_conversion_workspace' not in self._stubs: + self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['apply_conversion_workspace'] + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Awaitable[clouddms.DescribeDatabaseEntitiesResponse]]: + r"""Return a callable for the describe database entities method over gRPC. 
+ + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + Awaitable[~.DescribeDatabaseEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'describe_database_entities' not in self._stubs: + self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs['describe_database_entities'] + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Awaitable[clouddms.SearchBackgroundJobsResponse]]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + Awaitable[~.SearchBackgroundJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_background_jobs' not in self._stubs: + self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs['search_background_jobs'] + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse]]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[~.DescribeConversionWorkspaceRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'describe_conversion_workspace_revisions' not in self._stubs: + self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs['describe_conversion_workspace_revisions'] + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Awaitable[clouddms.FetchStaticIpsResponse]]: + r"""Return a callable for the fetch static ips method over gRPC. 
+ + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + Awaitable[~.FetchStaticIpsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_static_ips' not in self._stubs: + self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs['fetch_static_ips'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_migration_jobs: self._wrap_method( + self.list_migration_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_job: self._wrap_method( + self.get_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_migration_job: self._wrap_method( + self.create_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.update_migration_job: self._wrap_method( + self.update_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_migration_job: self._wrap_method( + self.delete_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.start_migration_job: self._wrap_method( + self.start_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.stop_migration_job: self._wrap_method( + self.stop_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.resume_migration_job: self._wrap_method( + 
self.resume_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.promote_migration_job: self._wrap_method( + self.promote_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.verify_migration_job: self._wrap_method( + self.verify_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.restart_migration_job: self._wrap_method( + self.restart_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_ssh_script: self._wrap_method( + self.generate_ssh_script, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_tcp_proxy_script: self._wrap_method( + self.generate_tcp_proxy_script, + default_timeout=None, + client_info=client_info, + ), + self.list_connection_profiles: self._wrap_method( + self.list_connection_profiles, + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection_profile: self._wrap_method( + self.get_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_connection_profile: self._wrap_method( + self.create_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection_profile: self._wrap_method( + self.update_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection_profile: self._wrap_method( + self.delete_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_private_connection: self._wrap_method( + self.create_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_private_connection: self._wrap_method( + self.get_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.list_private_connections: self._wrap_method( + self.list_private_connections, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_private_connection: self._wrap_method( + self.delete_private_connection, + default_timeout=60.0, 
+ client_info=client_info, + ), + self.get_conversion_workspace: self._wrap_method( + self.get_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_conversion_workspaces: self._wrap_method( + self.list_conversion_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.create_conversion_workspace: self._wrap_method( + self.create_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.update_conversion_workspace: self._wrap_method( + self.update_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_conversion_workspace: self._wrap_method( + self.delete_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.create_mapping_rule: self._wrap_method( + self.create_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_mapping_rule: self._wrap_method( + self.delete_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_mapping_rules: self._wrap_method( + self.list_mapping_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_mapping_rule: self._wrap_method( + self.get_mapping_rule, + default_timeout=None, + client_info=client_info, + ), + self.seed_conversion_workspace: self._wrap_method( + self.seed_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.import_mapping_rules: self._wrap_method( + self.import_mapping_rules, + default_timeout=60.0, + client_info=client_info, + ), + self.convert_conversion_workspace: self._wrap_method( + self.convert_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.commit_conversion_workspace: self._wrap_method( + self.commit_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_conversion_workspace: self._wrap_method( + self.rollback_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + 
self.apply_conversion_workspace: self._wrap_method( + self.apply_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_database_entities: self._wrap_method( + self.describe_database_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.search_background_jobs: self._wrap_method( + self.search_background_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_conversion_workspace_revisions: self._wrap_method( + self.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_static_ips: self._wrap_method( + self.fetch_static_ips, + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return 
gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ( + 'DataMigrationServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/__init__.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/__init__.py new file mode 100644 index 000000000000..0dc52f4befa4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/__init__.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, + CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, + CreateMappingRuleRequest, + CreateMigrationJobRequest, + CreatePrivateConnectionRequest, + DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, + DeleteMappingRuleRequest, + DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, + GenerateSshScriptRequest, + GenerateTcpProxyScriptRequest, + GetConnectionProfileRequest, + GetConversionWorkspaceRequest, + GetMappingRuleRequest, + GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, + ListConnectionProfilesRequest, + ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, + ListMappingRulesRequest, + ListMappingRulesResponse, + ListMigrationJobsRequest, + ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, + OperationMetadata, + PromoteMigrationJobRequest, + RestartMigrationJobRequest, + ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, + SshScript, + StartMigrationJobRequest, + StopMigrationJobRequest, + TcpProxyScript, + UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, + UpdateMigrationJobRequest, + VerifyMigrationJobRequest, + VmCreationConfig, + VmSelectionConfig, + DatabaseEntityView, +) +from .clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, + CloudSqlConnectionProfile, + CloudSqlSettings, + ConnectionProfile, + ConversionWorkspaceInfo, + DatabaseType, + ForwardSshTunnelConnectivity, + MigrationJob, + 
MigrationJobVerificationError, + MySqlConnectionProfile, + OracleConnectionProfile, + PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, + ReverseSshConnectivity, + SqlAclEntry, + SqlIpConfig, + SslConfig, + StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, + VpcPeeringConnectivity, + DatabaseEngine, + DatabaseProvider, + NetworkArchitecture, +) +from .conversionworkspace_resources import ( + ApplyHash, + AssignSpecificValue, + BackgroundJobLogEntry, + ColumnEntity, + ConditionalColumnSetValue, + ConstraintEntity, + ConversionWorkspace, + ConvertRowIdToColumn, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseInstanceEntity, + DoubleComparisonFilter, + EntityDdl, + EntityIssue, + EntityMapping, + EntityMappingLogEntry, + EntityMove, + FilterTableColumns, + FunctionEntity, + IndexEntity, + IntComparisonFilter, + MappingRule, + MappingRuleFilter, + MaterializedViewEntity, + MultiColumnDatatypeChange, + MultiEntityRename, + PackageEntity, + RoundToScale, + SchemaEntity, + SequenceEntity, + SetTablePrimaryKey, + SingleColumnChange, + SingleEntityRename, + SinglePackageChange, + SourceNumericFilter, + SourceSqlChange, + SourceTextFilter, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + UDTEntity, + ValueListFilter, + ValueTransformation, + ViewEntity, + BackgroundJobType, + DatabaseEntityType, + EntityNameTransformation, + ImportRulesFileFormat, + NumericFilterOption, + ValueComparison, + ValuePresentInList, +) + +__all__ = ( + 'ApplyConversionWorkspaceRequest', + 'CommitConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'CreateConnectionProfileRequest', + 'CreateConversionWorkspaceRequest', + 'CreateMappingRuleRequest', + 'CreateMigrationJobRequest', + 'CreatePrivateConnectionRequest', + 'DeleteConnectionProfileRequest', + 'DeleteConversionWorkspaceRequest', + 'DeleteMappingRuleRequest', + 'DeleteMigrationJobRequest', + 
'DeletePrivateConnectionRequest', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + 'GenerateSshScriptRequest', + 'GenerateTcpProxyScriptRequest', + 'GetConnectionProfileRequest', + 'GetConversionWorkspaceRequest', + 'GetMappingRuleRequest', + 'GetMigrationJobRequest', + 'GetPrivateConnectionRequest', + 'ImportMappingRulesRequest', + 'ListConnectionProfilesRequest', + 'ListConnectionProfilesResponse', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'ListMappingRulesRequest', + 'ListMappingRulesResponse', + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'OperationMetadata', + 'PromoteMigrationJobRequest', + 'RestartMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'RollbackConversionWorkspaceRequest', + 'SearchBackgroundJobsRequest', + 'SearchBackgroundJobsResponse', + 'SeedConversionWorkspaceRequest', + 'SshScript', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'TcpProxyScript', + 'UpdateConnectionProfileRequest', + 'UpdateConversionWorkspaceRequest', + 'UpdateMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'DatabaseEntityView', + 'AlloyDbConnectionProfile', + 'AlloyDbSettings', + 'CloudSqlConnectionProfile', + 'CloudSqlSettings', + 'ConnectionProfile', + 'ConversionWorkspaceInfo', + 'DatabaseType', + 'ForwardSshTunnelConnectivity', + 'MigrationJob', + 'MigrationJobVerificationError', + 'MySqlConnectionProfile', + 'OracleConnectionProfile', + 'PostgreSqlConnectionProfile', + 'PrivateConnection', + 'PrivateConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'SqlAclEntry', + 'SqlIpConfig', + 'SslConfig', + 'StaticIpConnectivity', + 'StaticServiceIpConnectivity', + 
'VpcPeeringConfig', + 'VpcPeeringConnectivity', + 'DatabaseEngine', + 'DatabaseProvider', + 'NetworkArchitecture', + 'ApplyHash', + 'AssignSpecificValue', + 'BackgroundJobLogEntry', + 'ColumnEntity', + 'ConditionalColumnSetValue', + 'ConstraintEntity', + 'ConversionWorkspace', + 'ConvertRowIdToColumn', + 'DatabaseEngineInfo', + 'DatabaseEntity', + 'DatabaseInstanceEntity', + 'DoubleComparisonFilter', + 'EntityDdl', + 'EntityIssue', + 'EntityMapping', + 'EntityMappingLogEntry', + 'EntityMove', + 'FilterTableColumns', + 'FunctionEntity', + 'IndexEntity', + 'IntComparisonFilter', + 'MappingRule', + 'MappingRuleFilter', + 'MaterializedViewEntity', + 'MultiColumnDatatypeChange', + 'MultiEntityRename', + 'PackageEntity', + 'RoundToScale', + 'SchemaEntity', + 'SequenceEntity', + 'SetTablePrimaryKey', + 'SingleColumnChange', + 'SingleEntityRename', + 'SinglePackageChange', + 'SourceNumericFilter', + 'SourceSqlChange', + 'SourceTextFilter', + 'StoredProcedureEntity', + 'SynonymEntity', + 'TableEntity', + 'TriggerEntity', + 'UDTEntity', + 'ValueListFilter', + 'ValueTransformation', + 'ViewEntity', + 'BackgroundJobType', + 'DatabaseEntityType', + 'EntityNameTransformation', + 'ImportRulesFileFormat', + 'NumericFilterOption', + 'ValueComparison', + 'ValuePresentInList', +) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms.py new file mode 100644 index 000000000000..813b124b48b1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms.py @@ -0,0 +1,2053 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'DatabaseEntityView', + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 'GetMigrationJobRequest', + 'CreateMigrationJobRequest', + 'UpdateMigrationJobRequest', + 'DeleteMigrationJobRequest', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'PromoteMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'RestartMigrationJobRequest', + 'GenerateSshScriptRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'SshScript', + 'GenerateTcpProxyScriptRequest', + 'TcpProxyScript', + 'ListConnectionProfilesRequest', + 'ListConnectionProfilesResponse', + 'GetConnectionProfileRequest', + 'CreateConnectionProfileRequest', + 'UpdateConnectionProfileRequest', + 'DeleteConnectionProfileRequest', + 'CreatePrivateConnectionRequest', + 'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'DeletePrivateConnectionRequest', + 'GetPrivateConnectionRequest', + 'OperationMetadata', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'GetConversionWorkspaceRequest', + 'CreateConversionWorkspaceRequest', + 
'UpdateConversionWorkspaceRequest', + 'DeleteConversionWorkspaceRequest', + 'CommitConversionWorkspaceRequest', + 'RollbackConversionWorkspaceRequest', + 'ApplyConversionWorkspaceRequest', + 'ListMappingRulesRequest', + 'ListMappingRulesResponse', + 'GetMappingRuleRequest', + 'SeedConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'ImportMappingRulesRequest', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'SearchBackgroundJobsRequest', + 'SearchBackgroundJobsResponse', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'CreateMappingRuleRequest', + 'DeleteMappingRuleRequest', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + }, +) + + +class DatabaseEntityView(proto.Enum): + r"""AIP-157 Partial Response view for Database Entity. + + Values: + DATABASE_ENTITY_VIEW_UNSPECIFIED (0): + Unspecified view. Defaults to basic view. + DATABASE_ENTITY_VIEW_BASIC (1): + Default view. Does not return DDLs or Issues. + DATABASE_ENTITY_VIEW_FULL (2): + Return full entity details including + mappings, ddl and issues. + DATABASE_ENTITY_VIEW_ROOT_SUMMARY (3): + Top-most (Database, Schema) nodes which are returned + contains summary details for their descendants such as the + number of entities per type and issues rollups. When this + view is used, only a single page of result is returned and + the page_size property of the request is ignored. The + returned page will only include the top-most node types. + """ + DATABASE_ENTITY_VIEW_UNSPECIFIED = 0 + DATABASE_ENTITY_VIEW_BASIC = 1 + DATABASE_ENTITY_VIEW_FULL = 2 + DATABASE_ENTITY_VIEW_ROOT_SUMMARY = 3 + + +class ListMigrationJobsRequest(proto.Message): + r"""Retrieves a list of all migration jobs in a given project and + location. + + Attributes: + parent (str): + Required. The parent which owns this + collection of migrationJobs. + page_size (int): + The maximum number of migration jobs to + return.
The service may return fewer than this + value. If unspecified, at most 50 migration jobs + will be returned. The maximum value is 1000; + values above 1000 are coerced to 1000. + page_token (str): + The nextPageToken value received in the + previous call to migrationJobs.list, used in the + subsequent request to retrieve the next page of + results. On first call this should be left + blank. When paginating, all other parameters + provided to migrationJobs.list must match the + call that provided the page token. + filter (str): + A filter expression that filters migration jobs listed in + the response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list migration jobs created this year by + specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z.** You can also filter nested + fields. For example, you could specify + **reverseSshConnectivity.vmIp = "1.2.3.4"** to select all + migration jobs connecting through the specific SSH tunnel + bastion. + order_by (str): + Sort the results based on the migration job + name. Valid values are: "name", "name asc", and + "name desc". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMigrationJobsResponse(proto.Message): + r"""Response message for 'ListMigrationJobs' request. + + Attributes: + migration_jobs (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob]): + The list of migration jobs objects. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. 
If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + migration_jobs: MutableSequence[clouddms_resources.MigrationJob] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.MigrationJob, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetMigrationJobRequest(proto.Message): + r"""Request message for 'GetMigrationJob' request. + + Attributes: + name (str): + Required. Name of the migration job resource + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMigrationJobRequest(proto.Message): + r"""Request message to create a new Database Migration Service + migration job in the specified project and region. + + Attributes: + parent (str): + Required. The parent which owns this + collection of migration jobs. + migration_job_id (str): + Required. The ID of the instance to create. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. Represents a `migration + job `__ + object. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + migration_job_id: str = proto.Field( + proto.STRING, + number=2, + ) + migration_job: clouddms_resources.MigrationJob = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.MigrationJob, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMigrationJobRequest(proto.Message): + r"""Request message for 'UpdateMigrationJob' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. The migration job parameters to + update. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + migration_job: clouddms_resources.MigrationJob = proto.Field( + proto.MESSAGE, + number=2, + message=clouddms_resources.MigrationJob, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteMigrationJobRequest(proto.Message): + r"""Request message for 'DeleteMigrationJob' request. + + Attributes: + name (str): + Required. Name of the migration job resource + to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). 
The maximum length is 40 + characters. + force (bool): + The destination CloudSQL connection profile + is always deleted with the migration job. In + case of force delete, the destination CloudSQL + replica database is also deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class StartMigrationJobRequest(proto.Message): + r"""Request message for 'StartMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to start. + skip_validation (bool): + Optional. Start the migration job without running prior + configuration verification. Defaults to ``false``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class StopMigrationJobRequest(proto.Message): + r"""Request message for 'StopMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to stop. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeMigrationJobRequest(proto.Message): + r"""Request message for 'ResumeMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to resume. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PromoteMigrationJobRequest(proto.Message): + r"""Request message for 'PromoteMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to + promote. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VerifyMigrationJobRequest(proto.Message): + r"""Request message for 'VerifyMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to verify. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the + changed fields to be verified. 
It will not + update the migration job. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Optional. The changed migration job + parameters to verify. It will not update the + migration job. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + migration_job: clouddms_resources.MigrationJob = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.MigrationJob, + ) + + +class RestartMigrationJobRequest(proto.Message): + r"""Request message for 'RestartMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to + restart. + skip_validation (bool): + Optional. Restart the migration job without running prior + configuration verification. Defaults to ``false``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class GenerateSshScriptRequest(proto.Message): + r"""Request message for 'GenerateSshScript' request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + migration_job (str): + Name of the migration job resource to + generate the SSH script. + vm (str): + Required. Bastion VM Instance name to use or + to create. + vm_creation_config (google.cloud.clouddms_v1.types.VmCreationConfig): + The VM creation configuration + + This field is a member of `oneof`_ ``vm_config``. + vm_selection_config (google.cloud.clouddms_v1.types.VmSelectionConfig): + The VM selection configuration + + This field is a member of `oneof`_ ``vm_config``. 
+ vm_port (int): + The port that will be open on the bastion + host. + """ + + migration_job: str = proto.Field( + proto.STRING, + number=1, + ) + vm: str = proto.Field( + proto.STRING, + number=2, + ) + vm_creation_config: 'VmCreationConfig' = proto.Field( + proto.MESSAGE, + number=100, + oneof='vm_config', + message='VmCreationConfig', + ) + vm_selection_config: 'VmSelectionConfig' = proto.Field( + proto.MESSAGE, + number=101, + oneof='vm_config', + message='VmSelectionConfig', + ) + vm_port: int = proto.Field( + proto.INT32, + number=3, + ) + + +class VmCreationConfig(proto.Message): + r"""VM creation configuration message + + Attributes: + vm_machine_type (str): + Required. VM instance machine type to create. + vm_zone (str): + The Google Cloud Platform zone to create the + VM in. + subnet (str): + The subnet name the vm needs to be created + in. + """ + + vm_machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + vm_zone: str = proto.Field( + proto.STRING, + number=2, + ) + subnet: str = proto.Field( + proto.STRING, + number=3, + ) + + +class VmSelectionConfig(proto.Message): + r"""VM selection configuration message + + Attributes: + vm_zone (str): + Required. The Google Cloud Platform zone the + VM is located. + """ + + vm_zone: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SshScript(proto.Message): + r"""Response message for 'GenerateSshScript' request. + + Attributes: + script (str): + The ssh configuration script. + """ + + script: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GenerateTcpProxyScriptRequest(proto.Message): + r"""Request message for 'GenerateTcpProxyScript' request. + + Attributes: + migration_job (str): + Name of the migration job resource to + generate the TCP Proxy script. + vm_name (str): + Required. The name of the Compute instance + that will host the proxy. + vm_machine_type (str): + Required. The type of the Compute instance + that will host the proxy. + vm_zone (str): + Optional. 
The Google Cloud Platform zone to + create the VM in. The fully qualified name of + the zone must be specified, including the region + name, for example "us-central1-b". If not + specified, uses the "-b" zone of the destination + Connection Profile's region. + vm_subnet (str): + Required. The name of the subnet the Compute + instance will use for private connectivity. Must + be supplied in the form of + projects/{project}/regions/{region}/subnetworks/{subnetwork}. + Note: the region for the subnet must match the + Compute instance region. + """ + + migration_job: str = proto.Field( + proto.STRING, + number=1, + ) + vm_name: str = proto.Field( + proto.STRING, + number=2, + ) + vm_machine_type: str = proto.Field( + proto.STRING, + number=3, + ) + vm_zone: str = proto.Field( + proto.STRING, + number=4, + ) + vm_subnet: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TcpProxyScript(proto.Message): + r"""Response message for 'GenerateTcpProxyScript' request. + + Attributes: + script (str): + The TCP Proxy configuration script. + """ + + script: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionProfilesRequest(proto.Message): + r"""Request message for 'ListConnectionProfiles' request. + + Attributes: + parent (str): + Required. The parent which owns this + collection of connection profiles. + page_size (int): + The maximum number of connection profiles to + return. The service may return fewer than this + value. If unspecified, at most 50 connection + profiles will be returned. The maximum value is + 1000; values above 1000 are coerced to 1000. + page_token (str): + A page token, received from a previous + ``ListConnectionProfiles`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListConnectionProfiles`` must match the call that provided + the page token. + filter (str): + A filter expression that filters connection profiles listed + in the response. 
The expression must specify the field name, + a comparison operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list connection profiles created this year + by specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z**. You can also filter nested + fields. For example, you could specify **mySql.username = + %lt;my_username%gt;** to list all connection profiles + configured to connect with a specific username. + order_by (str): + A comma-separated list of fields to order + results according to. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListConnectionProfilesResponse(proto.Message): + r"""Response message for 'ListConnectionProfiles' request. + + Attributes: + connection_profiles (MutableSequence[google.cloud.clouddms_v1.types.ConnectionProfile]): + The response list of connection profiles. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + connection_profiles: MutableSequence[clouddms_resources.ConnectionProfile] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.ConnectionProfile, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConnectionProfileRequest(proto.Message): + r"""Request message for 'GetConnectionProfile' request. 
+ + Attributes: + name (str): + Required. Name of the connection profile + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConnectionProfileRequest(proto.Message): + r"""Request message for 'CreateConnectionProfile' request. + + Attributes: + parent (str): + Required. The parent which owns this + collection of connection profiles. + connection_profile_id (str): + Required. The connection profile identifier. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The create request body including + the connection profile data + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't create any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Create the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection_profile_id: str = proto.Field( + proto.STRING, + number=2, + ) + connection_profile: clouddms_resources.ConnectionProfile = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.ConnectionProfile, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class UpdateConnectionProfileRequest(proto.Message): + r"""Request message for 'UpdateConnectionProfile' request. 
+ + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The connection profile parameters + to update. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't update any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Update the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + connection_profile: clouddms_resources.ConnectionProfile = proto.Field( + proto.MESSAGE, + number=2, + message=clouddms_resources.ConnectionProfile, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteConnectionProfileRequest(proto.Message): + r"""Request message for 'DeleteConnectionProfile' request. + + Attributes: + name (str): + Required. Name of the connection profile + resource to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. 
+ + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + force (bool): + In case of force delete, the CloudSQL replica + database is also deleted (only for CloudSQL + connection profile). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class CreatePrivateConnectionRequest(proto.Message): + r"""Request message to create a new private connection in the + specified project and region. + + Attributes: + parent (str): + Required. The parent that owns the collection + of PrivateConnections. + private_connection_id (str): + Required. The private connection identifier. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection resource to + create. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + skip_validation (bool): + Optional. If set to true, will skip + validations. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + private_connection_id: str = proto.Field( + proto.STRING, + number=2, + ) + private_connection: clouddms_resources.PrivateConnection = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.PrivateConnection, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListPrivateConnectionsRequest(proto.Message): + r"""Request message to retrieve a list of private connections in + a given project and location. 
+ + Attributes: + parent (str): + Required. The parent that owns the collection + of private connections. + page_size (int): + Maximum number of private connections to + return. If unspecified, at most 50 private + connections that are returned. The maximum value + is 1000; values above 1000 are coerced to 1000. + page_token (str): + Page token received from a previous + ``ListPrivateConnections`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListPrivateConnections`` must match the call that provided + the page token. + filter (str): + A filter expression that filters private connections listed + in the response. The expression must specify the field name, + a comparison operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list private connections created this year + by specifying **createTime %gt; + 2021-01-01T00:00:00.000000000Z**. + order_by (str): + Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPrivateConnectionsResponse(proto.Message): + r"""Response message for 'ListPrivateConnections' request. + + Attributes: + private_connections (MutableSequence[google.cloud.clouddms_v1.types.PrivateConnection]): + List of private connections. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + private_connections: MutableSequence[clouddms_resources.PrivateConnection] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.PrivateConnection, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeletePrivateConnectionRequest(proto.Message): + r"""Request message to delete a private connection. + + Attributes: + name (str): + Required. The name of the private connection + to delete. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPrivateConnectionRequest(proto.Message): + r"""Request message to get a private connection resource. + + Attributes: + name (str): + Required. The name of the private connection + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. 
+ requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListConversionWorkspacesRequest(proto.Message): + r"""Retrieve a list of all conversion workspaces in a given + project and location. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + page_size (int): + The maximum number of conversion workspaces + to return. The service may return fewer than + this value. If unspecified, at most 50 sets are + returned. + page_token (str): + The nextPageToken value received in the + previous call to conversionWorkspaces.list, used + in the subsequent request to retrieve the next + page of results. On first call this should be + left blank. When paginating, all other + parameters provided to conversionWorkspaces.list + must match the call that provided the page + token. + filter (str): + A filter expression that filters conversion workspaces + listed in the response. The expression must specify the + field name, a comparison operator, and the value that you + want to use for filtering. 
The value must be a string, a + number, or a boolean. The comparison operator must be either + =, !=, >, or <. For example, list conversion workspaces + created this year by specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z.** You can also filter nested + fields. For example, you could specify **source.version = + "12.c.1"** to select all conversion workspaces with source + database version equal to 12.c.1. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListConversionWorkspacesResponse(proto.Message): + r"""Response message for 'ListConversionWorkspaces' request. + + Attributes: + conversion_workspaces (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace objects. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + conversion_workspaces: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConversionWorkspaceRequest(proto.Message): + r"""Request message for 'GetConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to get. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConversionWorkspaceRequest(proto.Message): + r"""Request message to create a new Conversion Workspace + in the specified project and region. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + conversion_workspace_id (str): + Required. The ID of the conversion workspace + to create. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion workspace + object. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + conversion_workspace_id: str = proto.Field( + proto.STRING, + number=2, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( + proto.MESSAGE, + number=3, + message=conversionworkspace_resources.ConversionWorkspace, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateConversionWorkspaceRequest(proto.Message): + r"""Request message for 'UpdateConversionWorkspace' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace parameters + to update. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. 
+ + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( + proto.MESSAGE, + number=2, + message=conversionworkspace_resources.ConversionWorkspace, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteConversionWorkspaceRequest(proto.Message): + r"""Request message for 'DeleteConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + force (bool): + Force delete the conversion workspace, even + if there's a running migration that is using the + workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class CommitConversionWorkspaceRequest(proto.Message): + r"""Request message for 'CommitConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to commit. + commit_name (str): + Optional. Optional name of the commit. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RollbackConversionWorkspaceRequest(proto.Message): + r"""Request message for 'RollbackConversionWorkspace' request. + + Attributes: + name (str): + Required. 
Name of the conversion workspace + resource to roll back to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ApplyConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ApplyConversionWorkspace' request. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the conversion workspace resource for + which to apply the draft tree. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + filter (str): + Filter which entities to apply. Leaving this + field empty will apply all of the entities. + Supports Google AIP 160 based filtering. + dry_run (bool): + Optional. Only validates the apply process, + but doesn't change the destination database. + Only works for PostgreSQL destination connection + profile. + auto_commit (bool): + Optional. Specifies whether the conversion + workspace is to be committed automatically after + the apply. + connection_profile (str): + Optional. Fully qualified (Uri) name of the + destination connection profile. + + This field is a member of `oneof`_ ``destination``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + dry_run: bool = proto.Field( + proto.BOOL, + number=3, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=4, + ) + connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof='destination', + ) + + +class ListMappingRulesRequest(proto.Message): + r"""Retrieve a list of all mapping rules in a given conversion + workspace. + + Attributes: + parent (str): + Required. Name of the conversion workspace resource whose + mapping rules are listed in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + page_size (int): + The maximum number of rules to return. 
The + service may return fewer than this value. + page_token (str): + The nextPageToken value received in the + previous call to mappingRules.list, used in the + subsequent request to retrieve the next page of + results. On first call this should be left + blank. When paginating, all other parameters + provided to mappingRules.list must match the + call that provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListMappingRulesResponse(proto.Message): + r"""Response message for 'ListMappingRulesRequest' request. + + Attributes: + mapping_rules (MutableSequence[google.cloud.clouddms_v1.types.MappingRule]): + The list of conversion workspace mapping + rules. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + mapping_rules: MutableSequence[conversionworkspace_resources.MappingRule] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.MappingRule, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMappingRuleRequest(proto.Message): + r"""Request message for 'GetMappingRule' request. + + Attributes: + name (str): + Required. Name of the mapping rule resource + to get. Example: + conversionWorkspaces/123/mappingRules/rule123 + + In order to retrieve a previous revision of the + mapping rule, also provide the revision ID. + Example: + + conversionWorkspace/123/mappingRules/rule123@c7cfa2a8c7cfa2a8c7cfa2a8c7cfa2a8 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SeedConversionWorkspaceRequest(proto.Message): + r"""Request message for 'SeedConversionWorkspace' request. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the conversion workspace resource to seed with new + database structure, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Should the conversion workspace be committed + automatically after the seed operation. + source_connection_profile (str): + Optional. Fully qualified (Uri) name of the + source connection profile. + + This field is a member of `oneof`_ ``seed_from``. + destination_connection_profile (str): + Optional. Fully qualified (Uri) name of the + destination connection profile. + + This field is a member of `oneof`_ ``seed_from``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=2, + ) + source_connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof='seed_from', + ) + destination_connection_profile: str = proto.Field( + proto.STRING, + number=101, + oneof='seed_from', + ) + + +class ConvertConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ConvertConversionWorkspace' request. + + Attributes: + name (str): + Name of the conversion workspace resource to convert in the + form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Optional. Specifies whether the conversion + workspace is to be committed automatically after + the conversion. + filter (str): + Optional. Filter the entities to convert. + Leaving this field empty will convert all of the + entities. Supports Google AIP-160 style + filtering. + convert_full_path (bool): + Optional. 
Automatically convert the full + entity path for each entity specified by the + filter. For example, if the filter specifies a + table, that table schema (and database if there + is one) will also be converted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + convert_full_path: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ImportMappingRulesRequest(proto.Message): + r"""Request message for 'ImportMappingRules' request. + + Attributes: + parent (str): + Required. Name of the conversion workspace resource to + import the rules to in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + rules_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + Required. The format of the rules content + file. + rules_files (MutableSequence[google.cloud.clouddms_v1.types.ImportMappingRulesRequest.RulesFile]): + Required. One or more rules files. + auto_commit (bool): + Required. Should the conversion workspace be + committed automatically after the import + operation. + """ + + class RulesFile(proto.Message): + r"""Details of a single rules file. + + Attributes: + rules_source_filename (str): + Required. The filename of the rules that + needs to be converted. The filename is used + mainly so that future logs of the import rules + job contain it, and can therefore be searched by + it. + rules_content (str): + Required. The text content of the rules that + needs to be converted. 
+ """ + + rules_source_filename: str = proto.Field( + proto.STRING, + number=1, + ) + rules_content: str = proto.Field( + proto.STRING, + number=2, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + rules_format: conversionworkspace_resources.ImportRulesFileFormat = proto.Field( + proto.ENUM, + number=2, + enum=conversionworkspace_resources.ImportRulesFileFormat, + ) + rules_files: MutableSequence[RulesFile] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=RulesFile, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class DescribeDatabaseEntitiesRequest(proto.Message): + r"""Request message for 'DescribeDatabaseEntities' request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + database entities are described. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + page_size (int): + Optional. The maximum number of entities to + return. The service may return fewer entities + than the value specifies. + page_token (str): + Optional. The nextPageToken value received in + the previous call to + conversionWorkspace.describeDatabaseEntities, + used in the subsequent request to retrieve the + next page of results. On first call this should + be left blank. When paginating, all other + parameters provided to + conversionWorkspace.describeDatabaseEntities + must match the call that provided the page + token. + tree (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest.DBTreeType): + Required. The tree to fetch. + uncommitted (bool): + Optional. Whether to retrieve the latest committed version + of the entities or the latest version. This field is ignored + if a specific commit_id is specified. + commit_id (str): + Optional. Request a specific commit ID. If + not specified, the entities from the latest + commit are returned. + filter (str): + Optional. 
Filter the returned entities based + on AIP-160 standard. + view (google.cloud.clouddms_v1.types.DatabaseEntityView): + Optional. Results view based on AIP-157 + """ + class DBTreeType(proto.Enum): + r"""The type of a tree to return + + Values: + DB_TREE_TYPE_UNSPECIFIED (0): + Unspecified tree type. + SOURCE_TREE (1): + The source database tree. + DRAFT_TREE (2): + The draft database tree. + DESTINATION_TREE (3): + The destination database tree. + """ + DB_TREE_TYPE_UNSPECIFIED = 0 + SOURCE_TREE = 1 + DRAFT_TREE = 2 + DESTINATION_TREE = 3 + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + tree: DBTreeType = proto.Field( + proto.ENUM, + number=6, + enum=DBTreeType, + ) + uncommitted: bool = proto.Field( + proto.BOOL, + number=11, + ) + commit_id: str = proto.Field( + proto.STRING, + number=12, + ) + filter: str = proto.Field( + proto.STRING, + number=13, + ) + view: 'DatabaseEntityView' = proto.Field( + proto.ENUM, + number=14, + enum='DatabaseEntityView', + ) + + +class DescribeDatabaseEntitiesResponse(proto.Message): + r"""Response message for 'DescribeDatabaseEntities' request. + + Attributes: + database_entities (MutableSequence[google.cloud.clouddms_v1.types.DatabaseEntity]): + The list of database entities for the + conversion workspace. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + database_entities: MutableSequence[conversionworkspace_resources.DatabaseEntity] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.DatabaseEntity, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchBackgroundJobsRequest(proto.Message): + r"""Request message for 'SearchBackgroundJobs' request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + jobs are listed, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + return_most_recent_per_job_type (bool): + Optional. Whether or not to return just the + most recent job per job type, + max_size (int): + Optional. The maximum number of jobs to + return. The service may return fewer than this + value. If unspecified, at most 100 jobs are + returned. The maximum value is 100; values above + 100 are coerced to 100. + completed_until_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. If provided, only returns jobs that + completed until (not including) the given + timestamp. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + return_most_recent_per_job_type: bool = proto.Field( + proto.BOOL, + number=2, + ) + max_size: int = proto.Field( + proto.INT32, + number=3, + ) + completed_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class SearchBackgroundJobsResponse(proto.Message): + r"""Response message for 'SearchBackgroundJobs' request. + + Attributes: + jobs (MutableSequence[google.cloud.clouddms_v1.types.BackgroundJobLogEntry]): + The list of conversion workspace mapping + rules. 
+ """ + + jobs: MutableSequence[conversionworkspace_resources.BackgroundJobLogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.BackgroundJobLogEntry, + ) + + +class DescribeConversionWorkspaceRevisionsRequest(proto.Message): + r"""Request message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + revisions are listed. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + commit_id (str): + Optional. Optional filter to request a + specific commit ID. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DescribeConversionWorkspaceRevisionsResponse(proto.Message): + r"""Response message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + revisions (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace revisions. + """ + + revisions: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + + +class CreateMappingRuleRequest(proto.Message): + r"""Request message for 'CreateMappingRule' command. + + Attributes: + parent (str): + Required. The parent which owns this + collection of mapping rules. + mapping_rule_id (str): + Required. The ID of the rule to create. + mapping_rule (google.cloud.clouddms_v1.types.MappingRule): + Required. Represents a [mapping rule] + (https://cloud.google.com/database-migration/reference/rest/v1/projects.locations.mappingRules) + object. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. 
+ + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + mapping_rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + mapping_rule: conversionworkspace_resources.MappingRule = proto.Field( + proto.MESSAGE, + number=3, + message=conversionworkspace_resources.MappingRule, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteMappingRuleRequest(proto.Message): + r"""Request message for 'DeleteMappingRule' request. + + Attributes: + name (str): + Required. Name of the mapping rule resource + to delete. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FetchStaticIpsRequest(proto.Message): + r"""Request message for 'FetchStaticIps' request. + + Attributes: + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of IPs to return. + page_token (str): + A page token, received from a previous ``FetchStaticIps`` + call. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchStaticIpsResponse(proto.Message): + r"""Response message for a 'FetchStaticIps' request. 
+ + Attributes: + static_ips (MutableSequence[str]): + List of static IPs. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + static_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms_resources.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms_resources.py new file mode 100644 index 000000000000..2688516fa617 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/clouddms_resources.py @@ -0,0 +1,2119 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'NetworkArchitecture', + 'DatabaseEngine', + 'DatabaseProvider', + 'SslConfig', + 'MySqlConnectionProfile', + 'PostgreSqlConnectionProfile', + 'OracleConnectionProfile', + 'CloudSqlConnectionProfile', + 'AlloyDbConnectionProfile', + 'SqlAclEntry', + 'SqlIpConfig', + 'CloudSqlSettings', + 'AlloyDbSettings', + 'StaticIpConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'VpcPeeringConnectivity', + 'ForwardSshTunnelConnectivity', + 'StaticServiceIpConnectivity', + 'PrivateConnectivity', + 'DatabaseType', + 'MigrationJob', + 'ConversionWorkspaceInfo', + 'ConnectionProfile', + 'MigrationJobVerificationError', + 'PrivateConnection', + 'VpcPeeringConfig', + }, +) + + +class NetworkArchitecture(proto.Enum): + r""" + + Values: + NETWORK_ARCHITECTURE_UNSPECIFIED (0): + No description available. + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER (1): + Instance is in Cloud SQL's old producer + network architecture. + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER (2): + Instance is in Cloud SQL's new producer + network architecture. + """ + NETWORK_ARCHITECTURE_UNSPECIFIED = 0 + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER = 1 + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER = 2 + + +class DatabaseEngine(proto.Enum): + r"""The database engine types. + + Values: + DATABASE_ENGINE_UNSPECIFIED (0): + The source database engine of the migration + job is unknown. + MYSQL (1): + The source engine is MySQL. + POSTGRESQL (2): + The source engine is PostgreSQL. + ORACLE (4): + The source engine is Oracle. 
+ """ + DATABASE_ENGINE_UNSPECIFIED = 0 + MYSQL = 1 + POSTGRESQL = 2 + ORACLE = 4 + + +class DatabaseProvider(proto.Enum): + r"""The database providers. + + Values: + DATABASE_PROVIDER_UNSPECIFIED (0): + The database provider is unknown. + CLOUDSQL (1): + CloudSQL runs the database. + RDS (2): + RDS runs the database. + AURORA (3): + Amazon Aurora. + ALLOYDB (4): + AlloyDB. + """ + DATABASE_PROVIDER_UNSPECIFIED = 0 + CLOUDSQL = 1 + RDS = 2 + AURORA = 3 + ALLOYDB = 4 + + +class SslConfig(proto.Message): + r"""SSL configuration information. + + Attributes: + type_ (google.cloud.clouddms_v1.types.SslConfig.SslType): + Output only. The ssl config type according to 'client_key', + 'client_certificate' and 'ca_certificate'. + client_key (str): + Input only. The unencrypted PKCS#1 or PKCS#8 PEM-encoded + private key associated with the Client Certificate. If this + field is used then the 'client_certificate' field is + mandatory. + client_certificate (str): + Input only. The x509 PEM-encoded certificate that will be + used by the replica to authenticate against the source + database server.If this field is used then the 'client_key' + field is mandatory. + ca_certificate (str): + Required. Input only. The x509 PEM-encoded + certificate of the CA that signed the source + database server's certificate. The replica will + use this certificate to verify it's connecting + to the right host. + """ + class SslType(proto.Enum): + r"""Specifies The kind of ssl configuration used. + + Values: + SSL_TYPE_UNSPECIFIED (0): + Unspecified. + SERVER_ONLY (1): + Only 'ca_certificate' specified. + SERVER_CLIENT (2): + Both server ('ca_certificate'), and client ('client_key', + 'client_certificate') specified. 
+ """ + SSL_TYPE_UNSPECIFIED = 0 + SERVER_ONLY = 1 + SERVER_CLIENT = 2 + + type_: SslType = proto.Field( + proto.ENUM, + number=1, + enum=SslType, + ) + client_key: str = proto.Field( + proto.STRING, + number=2, + ) + client_certificate: str = proto.Field( + proto.STRING, + number=3, + ) + ca_certificate: str = proto.Field( + proto.STRING, + number=4, + ) + + +class MySqlConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + MySQL databases. + + Attributes: + host (str): + Required. The IP or hostname of the source + MySQL database. + port (int): + Required. The network port of the source + MySQL database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates If this connection + profile password is stored. + ssl (google.cloud.clouddms_v1.types.SslConfig): + SSL configuration for the destination to + connect to the source database. + cloud_sql_id (str): + If the source is a Cloud SQL database, use + this field to provide the Cloud SQL instance ID + of the source. 
+ """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + ssl: 'SslConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SslConfig', + ) + cloud_sql_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class PostgreSqlConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + PostgreSQL databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + PostgreSQL database. + port (int): + Required. The network port of the source + PostgreSQL database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates If this connection + profile password is stored. + ssl (google.cloud.clouddms_v1.types.SslConfig): + SSL configuration for the destination to + connect to the source database. + cloud_sql_id (str): + If the source is a Cloud SQL database, use + this field to provide the Cloud SQL instance ID + of the source. 
+ network_architecture (google.cloud.clouddms_v1.types.NetworkArchitecture): + Output only. If the source is a Cloud SQL + database, this field indicates the network + architecture it's associated with. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + Static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + private_service_connect_connectivity (google.cloud.clouddms_v1.types.PrivateServiceConnectConnectivity): + Private service connect connectivity. + + This field is a member of `oneof`_ ``connectivity``. + """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + ssl: 'SslConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SslConfig', + ) + cloud_sql_id: str = proto.Field( + proto.STRING, + number=7, + ) + network_architecture: 'NetworkArchitecture' = proto.Field( + proto.ENUM, + number=8, + enum='NetworkArchitecture', + ) + static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='StaticIpConnectivity', + ) + private_service_connect_connectivity: 'PrivateServiceConnectConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='PrivateServiceConnectConnectivity', + ) + + +class OracleConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + Oracle databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + Oracle database. + port (int): + Required. The network port of the source + Oracle database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates whether a new password + is included in the request. + database_service (str): + Required. Database service for the Oracle + connection. + ssl (google.cloud.clouddms_v1.types.SslConfig): + SSL configuration for the connection to the source Oracle + database. + + - Only ``SERVER_ONLY`` configuration is supported for + Oracle SSL. + - SSL is supported for Oracle versions 12 and above. + static_service_ip_connectivity (google.cloud.clouddms_v1.types.StaticServiceIpConnectivity): + Static Service IP connectivity. + + This field is a member of `oneof`_ ``connectivity``. + forward_ssh_connectivity (google.cloud.clouddms_v1.types.ForwardSshTunnelConnectivity): + Forward SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + private_connectivity (google.cloud.clouddms_v1.types.PrivateConnectivity): + Private connectivity. + + This field is a member of `oneof`_ ``connectivity``. 
+ """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_service: str = proto.Field( + proto.STRING, + number=6, + ) + ssl: 'SslConfig' = proto.Field( + proto.MESSAGE, + number=7, + message='SslConfig', + ) + static_service_ip_connectivity: 'StaticServiceIpConnectivity' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='StaticServiceIpConnectivity', + ) + forward_ssh_connectivity: 'ForwardSshTunnelConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='ForwardSshTunnelConnectivity', + ) + private_connectivity: 'PrivateConnectivity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connectivity', + message='PrivateConnectivity', + ) + + +class CloudSqlConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and, optionally, + the parameters required to create a Cloud SQL destination + database instance. + + Attributes: + cloud_sql_id (str): + Output only. The Cloud SQL instance ID that + this connection profile is associated with. + settings (google.cloud.clouddms_v1.types.CloudSqlSettings): + Immutable. Metadata used to create the + destination Cloud SQL database. + private_ip (str): + Output only. The Cloud SQL database + instance's private IP. + public_ip (str): + Output only. The Cloud SQL database + instance's public IP. + additional_public_ip (str): + Output only. The Cloud SQL database + instance's additional (outgoing) public IP. Used + when the Cloud SQL database availability type is + REGIONAL (i.e. multiple zones / highly + available). 
+ """ + + cloud_sql_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'CloudSqlSettings' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudSqlSettings', + ) + private_ip: str = proto.Field( + proto.STRING, + number=3, + ) + public_ip: str = proto.Field( + proto.STRING, + number=4, + ) + additional_public_ip: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AlloyDbConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and the parameters + required to create an AlloyDB destination cluster. + + Attributes: + cluster_id (str): + Required. The AlloyDB cluster ID that this + connection profile is associated with. + settings (google.cloud.clouddms_v1.types.AlloyDbSettings): + Immutable. Metadata used to create the + destination AlloyDB cluster. + """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'AlloyDbSettings' = proto.Field( + proto.MESSAGE, + number=2, + message='AlloyDbSettings', + ) + + +class SqlAclEntry(proto.Message): + r"""An entry for an Access Control list. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + The allowlisted value for the access control + list. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this access control entry expires in `RFC + 3339 `__ format, for + example: ``2012-11-15T16:19:00.094Z``. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Input only. The time-to-leave of this access + control entry. + + This field is a member of `oneof`_ ``expiration``. + label (str): + A label to identify this entry. 
+ """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + oneof='expiration', + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + oneof='expiration', + message=duration_pb2.Duration, + ) + label: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SqlIpConfig(proto.Message): + r"""IP Management configuration. + + Attributes: + enable_ipv4 (google.protobuf.wrappers_pb2.BoolValue): + Whether the instance should be assigned an + IPv4 address or not. + private_network (str): + The resource link for the VPC network from which the Cloud + SQL instance is accessible for private IP. For example, + ``projects/myProject/global/networks/default``. This setting + can be updated, but it cannot be removed after it is set. + allocated_ip_range (str): + Optional. The name of the allocated IP + address range for the private IP Cloud SQL + instance. This name refers to an already + allocated IP range address. If set, the instance + IP address will be created in the allocated + range. Note that this IP address range can't be + modified after the instance is created. If you + change the VPC when configuring connectivity + settings for the migration job, this field is + not relevant. + require_ssl (google.protobuf.wrappers_pb2.BoolValue): + Whether SSL connections over IP should be + enforced or not. + authorized_networks (MutableSequence[google.cloud.clouddms_v1.types.SqlAclEntry]): + The list of external networks that are allowed to connect to + the instance using the IP. See + https://en.wikipedia.org/wiki/CIDR_notation#CIDR_notation, + also known as 'slash' notation (e.g. ``192.168.100.0/24``). 
+ """ + + enable_ipv4: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + private_network: str = proto.Field( + proto.STRING, + number=2, + ) + allocated_ip_range: str = proto.Field( + proto.STRING, + number=5, + ) + require_ssl: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.BoolValue, + ) + authorized_networks: MutableSequence['SqlAclEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='SqlAclEntry', + ) + + +class CloudSqlSettings(proto.Message): + r"""Settings for creating a Cloud SQL database instance. + + Attributes: + database_version (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDatabaseVersion): + The database engine type and version. + user_labels (MutableMapping[str, str]): + The resource labels for a Cloud SQL instance to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "18kg", "count": "3" }``. + tier (str): + The tier (or machine type) for this instance, for example: + ``db-n1-standard-1`` (MySQL instances) or + ``db-custom-1-3840`` (PostgreSQL instances). For more + information, see `Cloud SQL Instance + Settings `__. + storage_auto_resize_limit (google.protobuf.wrappers_pb2.Int64Value): + The maximum size to which storage capacity + can be automatically increased. The default + value is 0, which specifies that there is no + limit. + activation_policy (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlActivationPolicy): + The activation policy specifies when the instance is + activated; it is applicable only when the instance state is + 'RUNNABLE'. Valid values: + + 'ALWAYS': The instance is on, and remains so even in the + absence of connection requests. + + ``NEVER``: The instance is off; it is not activated, even if + a connection request arrives. 
+ ip_config (google.cloud.clouddms_v1.types.SqlIpConfig): + The settings for IP Management. This allows + to enable or disable the instance IP and manage + which external networks can connect to the + instance. The IPv4 address cannot be disabled. + auto_storage_increase (google.protobuf.wrappers_pb2.BoolValue): + [default: ON] If you enable this setting, Cloud SQL checks + your available storage every 30 seconds. If the available + storage falls below a threshold size, Cloud SQL + automatically adds additional storage capacity. If the + available storage repeatedly falls below the threshold size, + Cloud SQL continues to add storage until it reaches the + maximum of 30 TB. + database_flags (MutableMapping[str, str]): + The database flags passed to the Cloud SQL + instance at startup. An object containing a list + of "key": value pairs. Example: { "name": + "wrench", "mass": "1.3kg", "count": "3" }. + data_disk_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDataDiskType): + The type of storage: ``PD_SSD`` (default) or ``PD_HDD``. + data_disk_size_gb (google.protobuf.wrappers_pb2.Int64Value): + The storage capacity available to the + database, in GB. The minimum (and default) size + is 10GB. + zone (str): + The Google Cloud Platform zone where your + Cloud SQL database instance is located. + secondary_zone (str): + Optional. The Google Cloud Platform zone + where the failover Cloud SQL database instance + is located. Used when the Cloud SQL database + availability type is REGIONAL (i.e. multiple + zones / highly available). + source_id (str): + The Database Migration Service source connection profile ID, + in the format: + ``projects/my_project_name/locations/us-central1/connectionProfiles/connection_profile_ID`` + root_password (str): + Input only. Initial root password. + root_password_set (bool): + Output only. Indicates If this connection + profile root password is stored. + collation (str): + The Cloud SQL default instance level + collation. 
+ cmek_key_name (str): + The KMS key name used for the csql instance. + availability_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlAvailabilityType): + Optional. Availability type. Potential values: + + - ``ZONAL``: The instance serves data from only one zone. + Outages in that zone affect data availability. + - ``REGIONAL``: The instance can serve data from more than + one zone in a region (it is highly available). + edition (google.cloud.clouddms_v1.types.CloudSqlSettings.Edition): + Optional. The edition of the given Cloud SQL + instance. + """ + class SqlActivationPolicy(proto.Enum): + r"""Specifies when the instance should be activated. + + Values: + SQL_ACTIVATION_POLICY_UNSPECIFIED (0): + unspecified policy. + ALWAYS (1): + The instance is always up and running. + NEVER (2): + The instance should never spin up. + """ + SQL_ACTIVATION_POLICY_UNSPECIFIED = 0 + ALWAYS = 1 + NEVER = 2 + + class SqlDataDiskType(proto.Enum): + r"""The storage options for Cloud SQL databases. + + Values: + SQL_DATA_DISK_TYPE_UNSPECIFIED (0): + Unspecified. + PD_SSD (1): + SSD disk. + PD_HDD (2): + HDD disk. + """ + SQL_DATA_DISK_TYPE_UNSPECIFIED = 0 + PD_SSD = 1 + PD_HDD = 2 + + class SqlDatabaseVersion(proto.Enum): + r"""The database engine type and version. + + Values: + SQL_DATABASE_VERSION_UNSPECIFIED (0): + Unspecified version. + MYSQL_5_6 (1): + MySQL 5.6. + MYSQL_5_7 (2): + MySQL 5.7. + POSTGRES_9_6 (3): + PostgreSQL 9.6. + POSTGRES_11 (4): + PostgreSQL 11. + POSTGRES_10 (5): + PostgreSQL 10. + MYSQL_8_0 (6): + MySQL 8.0. + POSTGRES_12 (7): + PostgreSQL 12. + POSTGRES_13 (8): + PostgreSQL 13. + POSTGRES_14 (17): + PostgreSQL 14. + POSTGRES_15 (18): + PostgreSQL 15. 
+ """ + SQL_DATABASE_VERSION_UNSPECIFIED = 0 + MYSQL_5_6 = 1 + MYSQL_5_7 = 2 + POSTGRES_9_6 = 3 + POSTGRES_11 = 4 + POSTGRES_10 = 5 + MYSQL_8_0 = 6 + POSTGRES_12 = 7 + POSTGRES_13 = 8 + POSTGRES_14 = 17 + POSTGRES_15 = 18 + + class SqlAvailabilityType(proto.Enum): + r"""The availability type of the given Cloud SQL instance. + + Values: + SQL_AVAILABILITY_TYPE_UNSPECIFIED (0): + This is an unknown Availability type. + ZONAL (1): + Zonal availablility instance. + REGIONAL (2): + Regional availability instance. + """ + SQL_AVAILABILITY_TYPE_UNSPECIFIED = 0 + ZONAL = 1 + REGIONAL = 2 + + class Edition(proto.Enum): + r"""The edition of the given Cloud SQL instance. Can be ENTERPRISE or + ENTERPRISE_PLUS. + + Values: + EDITION_UNSPECIFIED (0): + The instance did not specify the edition. + ENTERPRISE (2): + The instance is an enterprise edition. + ENTERPRISE_PLUS (3): + The instance is an enterprise plus edition. + """ + EDITION_UNSPECIFIED = 0 + ENTERPRISE = 2 + ENTERPRISE_PLUS = 3 + + database_version: SqlDatabaseVersion = proto.Field( + proto.ENUM, + number=1, + enum=SqlDatabaseVersion, + ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + tier: str = proto.Field( + proto.STRING, + number=3, + ) + storage_auto_resize_limit: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int64Value, + ) + activation_policy: SqlActivationPolicy = proto.Field( + proto.ENUM, + number=5, + enum=SqlActivationPolicy, + ) + ip_config: 'SqlIpConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SqlIpConfig', + ) + auto_storage_increase: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=7, + message=wrappers_pb2.BoolValue, + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + data_disk_type: SqlDataDiskType = proto.Field( + proto.ENUM, + number=9, + enum=SqlDataDiskType, + ) + data_disk_size_gb: 
wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=10, + message=wrappers_pb2.Int64Value, + ) + zone: str = proto.Field( + proto.STRING, + number=11, + ) + secondary_zone: str = proto.Field( + proto.STRING, + number=18, + ) + source_id: str = proto.Field( + proto.STRING, + number=12, + ) + root_password: str = proto.Field( + proto.STRING, + number=13, + ) + root_password_set: bool = proto.Field( + proto.BOOL, + number=14, + ) + collation: str = proto.Field( + proto.STRING, + number=15, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=16, + ) + availability_type: SqlAvailabilityType = proto.Field( + proto.ENUM, + number=17, + enum=SqlAvailabilityType, + ) + edition: Edition = proto.Field( + proto.ENUM, + number=19, + enum=Edition, + ) + + +class AlloyDbSettings(proto.Message): + r"""Settings for creating an AlloyDB cluster. + + Attributes: + initial_user (google.cloud.clouddms_v1.types.AlloyDbSettings.UserPassword): + Required. Input only. Initial user to setup + during cluster creation. Required. + vpc_network (str): + Required. The resource link for the VPC network in which + cluster resources are created and from which they are + accessible via Private IP. The network must belong to the + same project as the cluster. It is specified in the form: + "projects/{project_number}/global/networks/{network_id}". + This is required to create a cluster. + labels (MutableMapping[str, str]): + Labels for the AlloyDB cluster created by + DMS. An object containing a list of 'key', + 'value' pairs. + primary_instance_settings (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings): + + encryption_config (google.cloud.clouddms_v1.types.AlloyDbSettings.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the data disks and other + persistent data resources of a cluster with a + customer-managed encryption key (CMEK). 
When + this field is not specified, the cluster will + then use default encryption scheme to protect + the user data. + """ + + class UserPassword(proto.Message): + r"""The username/password for a database user. Used for + specifying initial users at cluster creation time. + + Attributes: + user (str): + The database username. + password (str): + The initial password for the user. + password_set (bool): + Output only. Indicates if the initial_user.password field + has been set. + """ + + user: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class PrimaryInstanceSettings(proto.Message): + r"""Settings for the cluster's primary instance + + Attributes: + id (str): + Required. The ID of the AlloyDB primary instance. The ID + must satisfy the regex expression "[a-z0-9-]+". + machine_config (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig): + Configuration for the machines that host the + underlying database engine. + database_flags (MutableMapping[str, str]): + Database flags to pass to AlloyDB when DMS is + creating the AlloyDB cluster and instances. See + the AlloyDB documentation for how these can be + used. + labels (MutableMapping[str, str]): + Labels for the AlloyDB primary instance + created by DMS. An object containing a list of + 'key', 'value' pairs. + private_ip (str): + Output only. The private IP address for the + Instance. This is the connection endpoint for an + end-user application. + """ + + class MachineConfig(proto.Message): + r"""MachineConfig describes the configuration of a machine. + + Attributes: + cpu_count (int): + The number of CPU's in the VM instance. 
+ """ + + cpu_count: int = proto.Field( + proto.INT32, + number=1, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + machine_config: 'AlloyDbSettings.PrimaryInstanceSettings.MachineConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='AlloyDbSettings.PrimaryInstanceSettings.MachineConfig', + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + private_ip: str = proto.Field( + proto.STRING, + number=8, + ) + + class EncryptionConfig(proto.Message): + r"""EncryptionConfig describes the encryption config of a cluster + that is encrypted with a CMEK (customer-managed encryption key). + + Attributes: + kms_key_name (str): + The fully-qualified resource name of the KMS key. Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + initial_user: UserPassword = proto.Field( + proto.MESSAGE, + number=1, + message=UserPassword, + ) + vpc_network: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + primary_instance_settings: PrimaryInstanceSettings = proto.Field( + proto.MESSAGE, + number=4, + message=PrimaryInstanceSettings, + ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=EncryptionConfig, + ) + + +class StaticIpConnectivity(proto.Message): + r"""The source database will allow incoming connections from the + public IP of the destination database. You can retrieve the + public IP of the Cloud SQL instance from the Cloud SQL console + or using Cloud SQL APIs. No additional configuration is + required. 
+ + """ + + +class PrivateServiceConnectConnectivity(proto.Message): + r"""`Private Service Connect + connectivity `__ + + Attributes: + service_attachment (str): + Required. A service attachment that exposes a database, and + has the following format: + projects/{project}/regions/{region}/serviceAttachments/{service_attachment_name} + """ + + service_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ReverseSshConnectivity(proto.Message): + r"""The details needed to configure a reverse SSH tunnel between + the source and destination databases. These details will be used + when calling the generateSshScript method (see + https://cloud.google.com/database-migration/docs/reference/rest/v1/projects.locations.migrationJobs/generateSshScript) + to produce the script that will help set up the reverse SSH + tunnel, and to set up the VPC peering between the Cloud SQL + private network and the VPC. + + Attributes: + vm_ip (str): + Required. The IP of the virtual machine + (Compute Engine) used as the bastion server for + the SSH tunnel. + vm_port (int): + Required. The forwarding port of the virtual + machine (Compute Engine) used as the bastion + server for the SSH tunnel. + vm (str): + The name of the virtual machine (Compute + Engine) used as the bastion server for the SSH + tunnel. + vpc (str): + The name of the VPC to peer with the Cloud + SQL private network. + """ + + vm_ip: str = proto.Field( + proto.STRING, + number=1, + ) + vm_port: int = proto.Field( + proto.INT32, + number=2, + ) + vm: str = proto.Field( + proto.STRING, + number=3, + ) + vpc: str = proto.Field( + proto.STRING, + number=4, + ) + + +class VpcPeeringConnectivity(proto.Message): + r"""The details of the VPC where the source database is located + in Google Cloud. We will use this information to set up the VPC + peering connection between Cloud SQL and this VPC. + + Attributes: + vpc (str): + The name of the VPC network to peer with the + Cloud SQL private network. 
+ """ + + vpc: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ForwardSshTunnelConnectivity(proto.Message): + r"""Forward SSH Tunnel connectivity. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hostname (str): + Required. Hostname for the SSH tunnel. + username (str): + Required. Username for the SSH tunnel. + port (int): + Port for the SSH tunnel, default value is 22. + password (str): + Input only. SSH password. + + This field is a member of `oneof`_ ``authentication_method``. + private_key (str): + Input only. SSH private key. + + This field is a member of `oneof`_ ``authentication_method``. + """ + + hostname: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + port: int = proto.Field( + proto.INT32, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=100, + oneof='authentication_method', + ) + private_key: str = proto.Field( + proto.STRING, + number=101, + oneof='authentication_method', + ) + + +class StaticServiceIpConnectivity(proto.Message): + r"""Static IP address connectivity configured on service project. + """ + + +class PrivateConnectivity(proto.Message): + r"""Private Connectivity. + + Attributes: + private_connection (str): + Required. The resource name (URI) of the + private connection. + """ + + private_connection: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatabaseType(proto.Message): + r"""A message defining the database engine and provider. + + Attributes: + provider (google.cloud.clouddms_v1.types.DatabaseProvider): + The database provider. + engine (google.cloud.clouddms_v1.types.DatabaseEngine): + The database engine. 
+ """ + + provider: 'DatabaseProvider' = proto.Field( + proto.ENUM, + number=1, + enum='DatabaseProvider', + ) + engine: 'DatabaseEngine' = proto.Field( + proto.ENUM, + number=2, + enum='DatabaseEngine', + ) + + +class MigrationJob(proto.Message): + r"""Represents a Database Migration Service migration job object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name (URI) of this migration job + resource, in the form of: + projects/{project}/locations/{location}/migrationJobs/{migrationJob}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the migration + job resource was created. A timestamp in RFC3339 + UTC "Zulu" format, accurate to nanoseconds. + Example: "2014-10-02T15:01:23.045123456Z". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the migration + job resource was last updated. A timestamp in + RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". + labels (MutableMapping[str, str]): + The resource labels for migration job to use to annotate any + related underlying resources such as Compute Engine VMs. An + object containing a list of "key": "value" pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The migration job display name. + state (google.cloud.clouddms_v1.types.MigrationJob.State): + The current migration job state. + phase (google.cloud.clouddms_v1.types.MigrationJob.Phase): + Output only. The current migration job phase. + type_ (google.cloud.clouddms_v1.types.MigrationJob.Type): + Required. The migration job type. 
+ dump_path (str): + The path to the dump file in Google Cloud Storage, in the + format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). This field and + the "dump_flags" field are mutually exclusive. + dump_flags (google.cloud.clouddms_v1.types.MigrationJob.DumpFlags): + The initial dump flags. This field and the "dump_path" field + are mutually exclusive. + source (str): + Required. The resource name (URI) of the + source connection profile. + destination (str): + Required. The resource name (URI) of the + destination connection profile. + reverse_ssh_connectivity (google.cloud.clouddms_v1.types.ReverseSshConnectivity): + The details needed to communicate to the + source over Reverse SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + vpc_peering_connectivity (google.cloud.clouddms_v1.types.VpcPeeringConnectivity): + The details of the VPC network that the + source database is located in. + + This field is a member of `oneof`_ ``connectivity``. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + duration (google.protobuf.duration_pb2.Duration): + Output only. The duration of the migration + job (in seconds). A duration in seconds with up + to nine fractional digits, terminated by 's'. + Example: + + "3.5s". + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + source_database (google.cloud.clouddms_v1.types.DatabaseType): + The database engine type and provider of the + source. + destination_database (google.cloud.clouddms_v1.types.DatabaseType): + The database engine type and provider of the + destination. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the migration job is + completed, the time when it was completed. 
+ conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspaceInfo): + The conversion workspace used by the + migration. + filter (str): + This field can be used to select the entities + to migrate as part of the migration job. It uses + AIP-160 notation to select a subset of the + entities configured on the associated + conversion-workspace. This field should not be + set on migration-jobs that are not associated + with a conversion workspace. + cmek_key_name (str): + The CMEK (customer-managed encryption key) fully qualified + key name used for the migration job. This field supports all + migration jobs types except for: + + - Mysql to Mysql (use the cmek field in the cloudsql + connection profile instead). + - PostrgeSQL to PostgreSQL (use the cmek field in the + cloudsql connection profile instead). + - PostgreSQL to AlloyDB (use the kms_key_name field in the + alloydb connection profile instead). Each Cloud CMEK key + has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + performance_config (google.cloud.clouddms_v1.types.MigrationJob.PerformanceConfig): + Optional. Data dump parallelism settings used + by the migration. Currently applicable only for + MySQL to Cloud SQL for MySQL migrations only. + """ + class State(proto.Enum): + r"""The current migration job states. + + Values: + STATE_UNSPECIFIED (0): + The state of the migration job is unknown. + MAINTENANCE (1): + The migration job is down for maintenance. + DRAFT (2): + The migration job is in draft mode and no + resources are created. + CREATING (3): + The migration job is being created. + NOT_STARTED (4): + The migration job is created and not started. + RUNNING (5): + The migration job is running. + FAILED (6): + The migration job failed. + COMPLETED (7): + The migration job has been completed. + DELETING (8): + The migration job is being deleted. + STOPPING (9): + The migration job is being stopped. 
+ STOPPED (10): + The migration job is currently stopped. + DELETED (11): + The migration job has been deleted. + UPDATING (12): + The migration job is being updated. + STARTING (13): + The migration job is starting. + RESTARTING (14): + The migration job is restarting. + RESUMING (15): + The migration job is resuming. + """ + STATE_UNSPECIFIED = 0 + MAINTENANCE = 1 + DRAFT = 2 + CREATING = 3 + NOT_STARTED = 4 + RUNNING = 5 + FAILED = 6 + COMPLETED = 7 + DELETING = 8 + STOPPING = 9 + STOPPED = 10 + DELETED = 11 + UPDATING = 12 + STARTING = 13 + RESTARTING = 14 + RESUMING = 15 + + class Phase(proto.Enum): + r"""The current migration job phase. + + Values: + PHASE_UNSPECIFIED (0): + The phase of the migration job is unknown. + FULL_DUMP (1): + The migration job is in the full dump phase. + CDC (2): + The migration job is CDC phase. + PROMOTE_IN_PROGRESS (3): + The migration job is running the promote + phase. + WAITING_FOR_SOURCE_WRITES_TO_STOP (4): + Only RDS flow - waiting for source writes to + stop + PREPARING_THE_DUMP (5): + Only RDS flow - the sources writes stopped, + waiting for dump to begin + """ + PHASE_UNSPECIFIED = 0 + FULL_DUMP = 1 + CDC = 2 + PROMOTE_IN_PROGRESS = 3 + WAITING_FOR_SOURCE_WRITES_TO_STOP = 4 + PREPARING_THE_DUMP = 5 + + class Type(proto.Enum): + r"""The type of migration job (one-time or continuous). + + Values: + TYPE_UNSPECIFIED (0): + The type of the migration job is unknown. + ONE_TIME (1): + The migration job is a one time migration. + CONTINUOUS (2): + The migration job is a continuous migration. + """ + TYPE_UNSPECIFIED = 0 + ONE_TIME = 1 + CONTINUOUS = 2 + + class DumpFlag(proto.Message): + r"""Dump flag definition. + + Attributes: + name (str): + The name of the flag + value (str): + The value of the flag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class DumpFlags(proto.Message): + r"""Dump flags definition. 
+ + Attributes: + dump_flags (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob.DumpFlag]): + The flags for the initial dump. + """ + + dump_flags: MutableSequence['MigrationJob.DumpFlag'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MigrationJob.DumpFlag', + ) + + class PerformanceConfig(proto.Message): + r"""Performance configuration definition. + + Attributes: + dump_parallel_level (google.cloud.clouddms_v1.types.MigrationJob.PerformanceConfig.DumpParallelLevel): + Initial dump parallelism level. + """ + class DumpParallelLevel(proto.Enum): + r"""Describes the parallelism level during initial dump. + + Values: + DUMP_PARALLEL_LEVEL_UNSPECIFIED (0): + Unknown dump parallel level. Will be + defaulted to OPTIMAL. + MIN (1): + Minimal parallel level. + OPTIMAL (2): + Optimal parallel level. + MAX (3): + Maximum parallel level. + """ + DUMP_PARALLEL_LEVEL_UNSPECIFIED = 0 + MIN = 1 + OPTIMAL = 2 + MAX = 3 + + dump_parallel_level: 'MigrationJob.PerformanceConfig.DumpParallelLevel' = proto.Field( + proto.ENUM, + number=1, + enum='MigrationJob.PerformanceConfig.DumpParallelLevel', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + phase: Phase = proto.Field( + proto.ENUM, + number=7, + enum=Phase, + ) + type_: Type = proto.Field( + proto.ENUM, + number=8, + enum=Type, + ) + dump_path: str = proto.Field( + proto.STRING, + number=9, + ) + dump_flags: DumpFlags = proto.Field( + proto.MESSAGE, + number=17, + message=DumpFlags, + ) + source: str = 
proto.Field( + proto.STRING, + number=10, + ) + destination: str = proto.Field( + proto.STRING, + number=11, + ) + reverse_ssh_connectivity: 'ReverseSshConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='ReverseSshConnectivity', + ) + vpc_peering_connectivity: 'VpcPeeringConnectivity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connectivity', + message='VpcPeeringConnectivity', + ) + static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( + proto.MESSAGE, + number=103, + oneof='connectivity', + message='StaticIpConnectivity', + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=13, + message=status_pb2.Status, + ) + source_database: 'DatabaseType' = proto.Field( + proto.MESSAGE, + number=14, + message='DatabaseType', + ) + destination_database: 'DatabaseType' = proto.Field( + proto.MESSAGE, + number=15, + message='DatabaseType', + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + conversion_workspace: 'ConversionWorkspaceInfo' = proto.Field( + proto.MESSAGE, + number=18, + message='ConversionWorkspaceInfo', + ) + filter: str = proto.Field( + proto.STRING, + number=20, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=21, + ) + performance_config: PerformanceConfig = proto.Field( + proto.MESSAGE, + number=22, + message=PerformanceConfig, + ) + + +class ConversionWorkspaceInfo(proto.Message): + r"""A conversion workspace's version. + + Attributes: + name (str): + The resource name (URI) of the conversion + workspace. + commit_id (str): + The commit ID of the conversion workspace. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConnectionProfile(proto.Message): + r"""A connection profile definition. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of this connection profile resource + in the form of + projects/{project}/locations/{location}/connectionProfiles/{connectionProfile}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. A timestamp in RFC3339 UTC "Zulu" + format, accurate to nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was last updated. A timestamp in RFC3339 UTC + "Zulu" format, accurate to nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". + labels (MutableMapping[str, str]): + The resource labels for connection profile to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + state (google.cloud.clouddms_v1.types.ConnectionProfile.State): + The current connection profile state (e.g. + DRAFT, READY, or FAILED). + display_name (str): + The connection profile display name. + mysql (google.cloud.clouddms_v1.types.MySqlConnectionProfile): + A MySQL database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + postgresql (google.cloud.clouddms_v1.types.PostgreSqlConnectionProfile): + A PostgreSQL database connection profile. 
+ + This field is a member of `oneof`_ ``connection_profile``. + oracle (google.cloud.clouddms_v1.types.OracleConnectionProfile): + An Oracle database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + cloudsql (google.cloud.clouddms_v1.types.CloudSqlConnectionProfile): + A CloudSQL database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + alloydb (google.cloud.clouddms_v1.types.AlloyDbConnectionProfile): + An AlloyDB cluster connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + provider (google.cloud.clouddms_v1.types.DatabaseProvider): + The database provider. + """ + class State(proto.Enum): + r"""The current connection profile state (e.g. DRAFT, READY, or + FAILED). + + Values: + STATE_UNSPECIFIED (0): + The state of the connection profile is + unknown. + DRAFT (1): + The connection profile is in draft mode and + fully editable. + CREATING (2): + The connection profile is being created. + READY (3): + The connection profile is ready. + UPDATING (4): + The connection profile is being updated. + DELETING (5): + The connection profile is being deleted. + DELETED (6): + The connection profile has been deleted. + FAILED (7): + The last action on the connection profile + failed. 
+ """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + CREATING = 2 + READY = 3 + UPDATING = 4 + DELETING = 5 + DELETED = 6 + FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + mysql: 'MySqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connection_profile', + message='MySqlConnectionProfile', + ) + postgresql: 'PostgreSqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connection_profile', + message='PostgreSqlConnectionProfile', + ) + oracle: 'OracleConnectionProfile' = proto.Field( + proto.MESSAGE, + number=104, + oneof='connection_profile', + message='OracleConnectionProfile', + ) + cloudsql: 'CloudSqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connection_profile', + message='CloudSqlConnectionProfile', + ) + alloydb: 'AlloyDbConnectionProfile' = proto.Field( + proto.MESSAGE, + number=105, + oneof='connection_profile', + message='AlloyDbConnectionProfile', + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + provider: 'DatabaseProvider' = proto.Field( + proto.ENUM, + number=8, + enum='DatabaseProvider', + ) + + +class MigrationJobVerificationError(proto.Message): + r"""Error message of a verification Migration job. + + Attributes: + error_code (google.cloud.clouddms_v1.types.MigrationJobVerificationError.ErrorCode): + Output only. An instance of ErrorCode + specifying the error that occurred. + error_message (str): + Output only. 
A formatted message with further + details about the error and a CTA. + error_detail_message (str): + Output only. A specific detailed error + message, if supplied by the engine. + """ + class ErrorCode(proto.Enum): + r"""A general error code describing the type of error that + occurred. + + Values: + ERROR_CODE_UNSPECIFIED (0): + An unknown error occurred + CONNECTION_FAILURE (1): + We failed to connect to one of the connection + profile. + AUTHENTICATION_FAILURE (2): + We failed to authenticate to one of the + connection profile. + INVALID_CONNECTION_PROFILE_CONFIG (3): + One of the involved connection profiles has + an invalid configuration. + VERSION_INCOMPATIBILITY (4): + The versions of the source and the + destination are incompatible. + CONNECTION_PROFILE_TYPES_INCOMPATIBILITY (5): + The types of the source and the destination + are incompatible. + NO_PGLOGICAL_INSTALLED (7): + No pglogical extension installed on + databases, applicable for postgres. + PGLOGICAL_NODE_ALREADY_EXISTS (8): + pglogical node already exists on databases, + applicable for postgres. + INVALID_WAL_LEVEL (9): + The value of parameter wal_level is not set to logical. + INVALID_SHARED_PRELOAD_LIBRARY (10): + The value of parameter shared_preload_libraries does not + include pglogical. + INSUFFICIENT_MAX_REPLICATION_SLOTS (11): + The value of parameter max_replication_slots is not + sufficient. + INSUFFICIENT_MAX_WAL_SENDERS (12): + The value of parameter max_wal_senders is not sufficient. + INSUFFICIENT_MAX_WORKER_PROCESSES (13): + The value of parameter max_worker_processes is not + sufficient. + UNSUPPORTED_EXTENSIONS (14): + Extensions installed are either not supported + or having unsupported versions. + UNSUPPORTED_MIGRATION_TYPE (15): + Unsupported migration type. + INVALID_RDS_LOGICAL_REPLICATION (16): + Invalid RDS logical replication. + UNSUPPORTED_GTID_MODE (17): + The gtid_mode is not supported, applicable for MySQL. 
+ UNSUPPORTED_TABLE_DEFINITION (18): + The table definition is not support due to + missing primary key or replica identity. + UNSUPPORTED_DEFINER (19): + The definer is not supported. + CANT_RESTART_RUNNING_MIGRATION (21): + Migration is already running at the time of + restart request. + SOURCE_ALREADY_SETUP (23): + The source already has a replication setup. + TABLES_WITH_LIMITED_SUPPORT (24): + The source has tables with limited support. + E.g. PostgreSQL tables without primary keys. + UNSUPPORTED_DATABASE_LOCALE (25): + The source uses an unsupported locale. + UNSUPPORTED_DATABASE_FDW_CONFIG (26): + The source uses an unsupported Foreign Data + Wrapper configuration. + ERROR_RDBMS (27): + There was an underlying RDBMS error. + SOURCE_SIZE_EXCEEDS_THRESHOLD (28): + The source DB size in Bytes exceeds a certain + threshold. The migration might require an + increase of quota, or might not be supported. + EXISTING_CONFLICTING_DATABASES (29): + The destination DB contains existing + databases that are conflicting with those in the + source DB. + PARALLEL_IMPORT_INSUFFICIENT_PRIVILEGE (30): + Insufficient privilege to enable the + parallelism configuration. 
+ """ + ERROR_CODE_UNSPECIFIED = 0 + CONNECTION_FAILURE = 1 + AUTHENTICATION_FAILURE = 2 + INVALID_CONNECTION_PROFILE_CONFIG = 3 + VERSION_INCOMPATIBILITY = 4 + CONNECTION_PROFILE_TYPES_INCOMPATIBILITY = 5 + NO_PGLOGICAL_INSTALLED = 7 + PGLOGICAL_NODE_ALREADY_EXISTS = 8 + INVALID_WAL_LEVEL = 9 + INVALID_SHARED_PRELOAD_LIBRARY = 10 + INSUFFICIENT_MAX_REPLICATION_SLOTS = 11 + INSUFFICIENT_MAX_WAL_SENDERS = 12 + INSUFFICIENT_MAX_WORKER_PROCESSES = 13 + UNSUPPORTED_EXTENSIONS = 14 + UNSUPPORTED_MIGRATION_TYPE = 15 + INVALID_RDS_LOGICAL_REPLICATION = 16 + UNSUPPORTED_GTID_MODE = 17 + UNSUPPORTED_TABLE_DEFINITION = 18 + UNSUPPORTED_DEFINER = 19 + CANT_RESTART_RUNNING_MIGRATION = 21 + SOURCE_ALREADY_SETUP = 23 + TABLES_WITH_LIMITED_SUPPORT = 24 + UNSUPPORTED_DATABASE_LOCALE = 25 + UNSUPPORTED_DATABASE_FDW_CONFIG = 26 + ERROR_RDBMS = 27 + SOURCE_SIZE_EXCEEDS_THRESHOLD = 28 + EXISTING_CONFLICTING_DATABASES = 29 + PARALLEL_IMPORT_INSUFFICIENT_PRIVILEGE = 30 + + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=1, + enum=ErrorCode, + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + error_detail_message: str = proto.Field( + proto.STRING, + number=3, + ) + + +class PrivateConnection(proto.Message): + r"""The PrivateConnection resource is used to establish private + connectivity with the customer's network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of the resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of the resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update time of the + resource. + labels (MutableMapping[str, str]): + The resource labels for private connections to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. 
+ + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The private connection display name. + state (google.cloud.clouddms_v1.types.PrivateConnection.State): + Output only. The state of the private + connection. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + vpc_peering_config (google.cloud.clouddms_v1.types.VpcPeeringConfig): + VPC peering configuration. + + This field is a member of `oneof`_ ``connectivity``. + """ + class State(proto.Enum): + r"""Private Connection state. + + Values: + STATE_UNSPECIFIED (0): + No description available. + CREATING (1): + The private connection is in creation state - + creating resources. + CREATED (2): + The private connection has been created with + all of its resources. + FAILED (3): + The private connection creation has failed. + DELETING (4): + The private connection is being deleted. + FAILED_TO_DELETE (5): + Delete request has failed, resource is in + invalid state. + DELETED (6): + The private connection has been deleted. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + CREATED = 2 + FAILED = 3 + DELETING = 4 + FAILED_TO_DELETE = 5 + DELETED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + vpc_peering_config: 'VpcPeeringConfig' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='VpcPeeringConfig', + ) + + +class VpcPeeringConfig(proto.Message): + r"""The VPC peering configuration is used to create VPC peering + with the consumer's VPC. + + Attributes: + vpc_name (str): + Required. Fully qualified name of the VPC + that Database Migration Service will peer to. + subnet (str): + Required. A free subnet for peering. 
(CIDR of + /29) + """ + + vpc_name: str = proto.Field( + proto.STRING, + number=1, + ) + subnet: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py new file mode 100644 index 000000000000..1828fcb93353 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py @@ -0,0 +1,2719 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.clouddms_v1.types import clouddms_resources +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'ValuePresentInList', + 'DatabaseEntityType', + 'EntityNameTransformation', + 'BackgroundJobType', + 'ImportRulesFileFormat', + 'ValueComparison', + 'NumericFilterOption', + 'DatabaseEngineInfo', + 'ConversionWorkspace', + 'BackgroundJobLogEntry', + 'MappingRuleFilter', + 'MappingRule', + 'SingleEntityRename', + 'MultiEntityRename', + 'EntityMove', + 'SingleColumnChange', + 'MultiColumnDatatypeChange', + 'SourceTextFilter', + 'SourceNumericFilter', + 'ConditionalColumnSetValue', + 'ValueTransformation', + 'ConvertRowIdToColumn', + 'SetTablePrimaryKey', + 'SinglePackageChange', + 'SourceSqlChange', + 'FilterTableColumns', + 'ValueListFilter', + 'IntComparisonFilter', + 'DoubleComparisonFilter', + 'AssignSpecificValue', + 'ApplyHash', + 'RoundToScale', + 'DatabaseEntity', + 'DatabaseInstanceEntity', + 'SchemaEntity', + 'TableEntity', + 'ColumnEntity', + 'ConstraintEntity', + 'IndexEntity', + 'TriggerEntity', + 'ViewEntity', + 'SequenceEntity', + 'StoredProcedureEntity', + 'FunctionEntity', + 'MaterializedViewEntity', + 'SynonymEntity', + 'PackageEntity', + 'UDTEntity', + 'EntityMapping', + 'EntityMappingLogEntry', + 'EntityDdl', + 'EntityIssue', + }, +) + + +class ValuePresentInList(proto.Enum): + r"""Enum used by ValueListFilter to indicate whether the source + value is in the supplied list + + Values: + VALUE_PRESENT_IN_LIST_UNSPECIFIED (0): + Value present in list unspecified + VALUE_PRESENT_IN_LIST_IF_VALUE_LIST (1): + If the source value is in the supplied list at value_list + VALUE_PRESENT_IN_LIST_IF_VALUE_NOT_LIST (2): + If 
the source value is not in the supplied list at + value_list + """ + VALUE_PRESENT_IN_LIST_UNSPECIFIED = 0 + VALUE_PRESENT_IN_LIST_IF_VALUE_LIST = 1 + VALUE_PRESENT_IN_LIST_IF_VALUE_NOT_LIST = 2 + + +class DatabaseEntityType(proto.Enum): + r"""The type of database entities supported, + + Values: + DATABASE_ENTITY_TYPE_UNSPECIFIED (0): + Unspecified database entity type. + DATABASE_ENTITY_TYPE_SCHEMA (1): + Schema. + DATABASE_ENTITY_TYPE_TABLE (2): + Table. + DATABASE_ENTITY_TYPE_COLUMN (3): + Column. + DATABASE_ENTITY_TYPE_CONSTRAINT (4): + Constraint. + DATABASE_ENTITY_TYPE_INDEX (5): + Index. + DATABASE_ENTITY_TYPE_TRIGGER (6): + Trigger. + DATABASE_ENTITY_TYPE_VIEW (7): + View. + DATABASE_ENTITY_TYPE_SEQUENCE (8): + Sequence. + DATABASE_ENTITY_TYPE_STORED_PROCEDURE (9): + Stored Procedure. + DATABASE_ENTITY_TYPE_FUNCTION (10): + Function. + DATABASE_ENTITY_TYPE_SYNONYM (11): + Synonym. + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE (12): + Package. + DATABASE_ENTITY_TYPE_UDT (13): + UDT. + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW (14): + Materialized View. + DATABASE_ENTITY_TYPE_DATABASE (15): + Database. + """ + DATABASE_ENTITY_TYPE_UNSPECIFIED = 0 + DATABASE_ENTITY_TYPE_SCHEMA = 1 + DATABASE_ENTITY_TYPE_TABLE = 2 + DATABASE_ENTITY_TYPE_COLUMN = 3 + DATABASE_ENTITY_TYPE_CONSTRAINT = 4 + DATABASE_ENTITY_TYPE_INDEX = 5 + DATABASE_ENTITY_TYPE_TRIGGER = 6 + DATABASE_ENTITY_TYPE_VIEW = 7 + DATABASE_ENTITY_TYPE_SEQUENCE = 8 + DATABASE_ENTITY_TYPE_STORED_PROCEDURE = 9 + DATABASE_ENTITY_TYPE_FUNCTION = 10 + DATABASE_ENTITY_TYPE_SYNONYM = 11 + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE = 12 + DATABASE_ENTITY_TYPE_UDT = 13 + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW = 14 + DATABASE_ENTITY_TYPE_DATABASE = 15 + + +class EntityNameTransformation(proto.Enum): + r"""Entity Name Transformation Types + + Values: + ENTITY_NAME_TRANSFORMATION_UNSPECIFIED (0): + Entity name transformation unspecified. + ENTITY_NAME_TRANSFORMATION_NO_TRANSFORMATION (1): + No transformation. 
+ ENTITY_NAME_TRANSFORMATION_LOWER_CASE (2): + Transform to lower case. + ENTITY_NAME_TRANSFORMATION_UPPER_CASE (3): + Transform to upper case. + ENTITY_NAME_TRANSFORMATION_CAPITALIZED_CASE (4): + Transform to capitalized case. + """ + ENTITY_NAME_TRANSFORMATION_UNSPECIFIED = 0 + ENTITY_NAME_TRANSFORMATION_NO_TRANSFORMATION = 1 + ENTITY_NAME_TRANSFORMATION_LOWER_CASE = 2 + ENTITY_NAME_TRANSFORMATION_UPPER_CASE = 3 + ENTITY_NAME_TRANSFORMATION_CAPITALIZED_CASE = 4 + + +class BackgroundJobType(proto.Enum): + r"""The types of jobs that can be executed in the background. + + Values: + BACKGROUND_JOB_TYPE_UNSPECIFIED (0): + Unspecified background job type. + BACKGROUND_JOB_TYPE_SOURCE_SEED (1): + Job to seed from the source database. + BACKGROUND_JOB_TYPE_CONVERT (2): + Job to convert the source database into a + draft of the destination database. + BACKGROUND_JOB_TYPE_APPLY_DESTINATION (3): + Job to apply the draft tree onto the + destination. + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE (5): + Job to import and convert mapping rules from + an external source such as an ora2pg config + file. + """ + BACKGROUND_JOB_TYPE_UNSPECIFIED = 0 + BACKGROUND_JOB_TYPE_SOURCE_SEED = 1 + BACKGROUND_JOB_TYPE_CONVERT = 2 + BACKGROUND_JOB_TYPE_APPLY_DESTINATION = 3 + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE = 5 + + +class ImportRulesFileFormat(proto.Enum): + r"""The format for the import rules file. + + Values: + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED (0): + Unspecified rules format. + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE (1): + HarbourBridge session file. + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE (2): + Ora2Pg configuration file. + """ + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED = 0 + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE = 1 + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE = 2 + + +class ValueComparison(proto.Enum): + r"""Enum used by IntComparisonFilter and DoubleComparisonFilter + to indicate the relation between source value and compare value. 
+ + Values: + VALUE_COMPARISON_UNSPECIFIED (0): + Value comparison unspecified. + VALUE_COMPARISON_IF_VALUE_SMALLER_THAN (1): + Value is smaller than the Compare value. + VALUE_COMPARISON_IF_VALUE_SMALLER_EQUAL_THAN (2): + Value is smaller or equal than the Compare + value. + VALUE_COMPARISON_IF_VALUE_LARGER_THAN (3): + Value is larger than the Compare value. + VALUE_COMPARISON_IF_VALUE_LARGER_EQUAL_THAN (4): + Value is larger or equal than the Compare + value. + """ + VALUE_COMPARISON_UNSPECIFIED = 0 + VALUE_COMPARISON_IF_VALUE_SMALLER_THAN = 1 + VALUE_COMPARISON_IF_VALUE_SMALLER_EQUAL_THAN = 2 + VALUE_COMPARISON_IF_VALUE_LARGER_THAN = 3 + VALUE_COMPARISON_IF_VALUE_LARGER_EQUAL_THAN = 4 + + +class NumericFilterOption(proto.Enum): + r"""Specifies the columns on which numeric filter needs to be + applied. + + Values: + NUMERIC_FILTER_OPTION_UNSPECIFIED (0): + Numeric filter option unspecified + NUMERIC_FILTER_OPTION_ALL (1): + Numeric filter option that matches all + numeric columns. + NUMERIC_FILTER_OPTION_LIMIT (2): + Numeric filter option that matches columns + having numeric datatypes with specified + precision and scale within the limited range of + filter. + NUMERIC_FILTER_OPTION_LIMITLESS (3): + Numeric filter option that matches only the + numeric columns with no precision and scale + specified. + """ + NUMERIC_FILTER_OPTION_UNSPECIFIED = 0 + NUMERIC_FILTER_OPTION_ALL = 1 + NUMERIC_FILTER_OPTION_LIMIT = 2 + NUMERIC_FILTER_OPTION_LIMITLESS = 3 + + +class DatabaseEngineInfo(proto.Message): + r"""The type and version of a source or destination database. + + Attributes: + engine (google.cloud.clouddms_v1.types.DatabaseEngine): + Required. Engine type. + version (str): + Required. Engine named version, for example + 12.c.1. 
+ """ + + engine: clouddms_resources.DatabaseEngine = proto.Field( + proto.ENUM, + number=1, + enum=clouddms_resources.DatabaseEngine, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConversionWorkspace(proto.Message): + r"""The main conversion workspace resource entity. + + Attributes: + name (str): + Full name of the workspace resource, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + source (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The source engine details. + destination (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The destination engine details. + global_settings (MutableMapping[str, str]): + Optional. A generic list of settings for the workspace. The + settings are database pair dependent and can indicate + default behavior for the mapping rules engine or turn on or + off specific features. Such examples can be: + convert_foreign_key_to_interleave=true, skip_triggers=false, + ignore_non_table_synonyms=true + has_uncommitted_changes (bool): + Output only. Whether the workspace has + uncommitted changes (changes which were made + after the workspace was committed). + latest_commit_id (str): + Output only. The latest commit ID. + latest_commit_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + was committed. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was last updated. + display_name (str): + Optional. The display name for the workspace. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source: 'DatabaseEngineInfo' = proto.Field( + proto.MESSAGE, + number=2, + message='DatabaseEngineInfo', + ) + destination: 'DatabaseEngineInfo' = proto.Field( + proto.MESSAGE, + number=3, + message='DatabaseEngineInfo', + ) + global_settings: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + has_uncommitted_changes: bool = proto.Field( + proto.BOOL, + number=5, + ) + latest_commit_id: str = proto.Field( + proto.STRING, + number=6, + ) + latest_commit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackgroundJobLogEntry(proto.Message): + r"""Execution log of a background job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + The background job log entry ID. + job_type (google.cloud.clouddms_v1.types.BackgroundJobType): + The type of job that was executed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + started. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + finished. + completion_state (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.JobCompletionState): + Output only. Job completion state, i.e. the + final state after the job completed. 
+ completion_comment (str): + Output only. Job completion comment, such as + how many entities were seeded, how many warnings + were found during conversion, and similar + information. + request_autocommit (bool): + Output only. Whether the client requested the + conversion workspace to be committed after a + successful completion of the job. + seed_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.SeedJobDetails): + Output only. Seed job details. + + This field is a member of `oneof`_ ``job_details``. + import_rules_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ImportRulesJobDetails): + Output only. Import rules job details. + + This field is a member of `oneof`_ ``job_details``. + convert_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ConvertJobDetails): + Output only. Convert job details. + + This field is a member of `oneof`_ ``job_details``. + apply_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ApplyJobDetails): + Output only. Apply job details. + + This field is a member of `oneof`_ ``job_details``. + """ + class JobCompletionState(proto.Enum): + r"""Final state after a job completes. + + Values: + JOB_COMPLETION_STATE_UNSPECIFIED (0): + The status is not specified. This state is + used when job is not yet finished. + SUCCEEDED (1): + Success. + FAILED (2): + Error. + """ + JOB_COMPLETION_STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + class SeedJobDetails(proto.Message): + r"""Details regarding a Seed background job. + + Attributes: + connection_profile (str): + Output only. The connection profile which was + used for the seed job. + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + + class ImportRulesJobDetails(proto.Message): + r"""Details regarding an Import Rules background job. + + Attributes: + files (MutableSequence[str]): + Output only. File names used for the import + rules job. 
+ file_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + Output only. The requested file format. + """ + + files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + file_format: 'ImportRulesFileFormat' = proto.Field( + proto.ENUM, + number=2, + enum='ImportRulesFileFormat', + ) + + class ConvertJobDetails(proto.Message): + r"""Details regarding a Convert background job. + + Attributes: + filter (str): + Output only. AIP-160 based filter used to + specify the entities to convert + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + + class ApplyJobDetails(proto.Message): + r"""Details regarding an Apply background job. + + Attributes: + connection_profile (str): + Output only. The connection profile which was + used for the apply job. + filter (str): + Output only. AIP-160 based filter used to + specify the entities to apply + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + job_type: 'BackgroundJobType' = proto.Field( + proto.ENUM, + number=2, + enum='BackgroundJobType', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + completion_state: JobCompletionState = proto.Field( + proto.ENUM, + number=5, + enum=JobCompletionState, + ) + completion_comment: str = proto.Field( + proto.STRING, + number=6, + ) + request_autocommit: bool = proto.Field( + proto.BOOL, + number=7, + ) + seed_job_details: SeedJobDetails = proto.Field( + proto.MESSAGE, + number=100, + oneof='job_details', + message=SeedJobDetails, + ) + import_rules_job_details: ImportRulesJobDetails = proto.Field( + proto.MESSAGE, + number=101, + oneof='job_details', + message=ImportRulesJobDetails, + ) + 
convert_job_details: ConvertJobDetails = proto.Field( + proto.MESSAGE, + number=102, + oneof='job_details', + message=ConvertJobDetails, + ) + apply_job_details: ApplyJobDetails = proto.Field( + proto.MESSAGE, + number=103, + oneof='job_details', + message=ApplyJobDetails, + ) + + +class MappingRuleFilter(proto.Message): + r"""A filter defining the entities that a mapping rule should be + applied to. When more than one field is specified, the rule is + applied only to entities which match all the fields. + + Attributes: + parent_entity (str): + Optional. The rule should be applied to + entities whose parent entity (fully qualified + name) matches the given value. For example, if + the rule applies to a table entity, the expected + value should be a schema (schema). If the rule + applies to a column or index entity, the + expected value can be either a schema (schema) + or a table (schema.table) + entity_name_prefix (str): + Optional. The rule should be applied to + entities whose non-qualified name starts with + the given prefix. + entity_name_suffix (str): + Optional. The rule should be applied to + entities whose non-qualified name ends with the + given suffix. + entity_name_contains (str): + Optional. The rule should be applied to + entities whose non-qualified name contains the + given string. + entities (MutableSequence[str]): + Optional. The rule should be applied to + specific entities defined by their fully + qualified names. + """ + + parent_entity: str = proto.Field( + proto.STRING, + number=1, + ) + entity_name_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + entity_name_suffix: str = proto.Field( + proto.STRING, + number=3, + ) + entity_name_contains: str = proto.Field( + proto.STRING, + number=4, + ) + entities: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class MappingRule(proto.Message): + r"""Definition of a transformation that is to be applied to a + group of entities in the source schema. 
Several such + transformations can be applied to an entity sequentially to + define the corresponding entity in the target schema. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Full name of the mapping rule resource, in + the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{set}/mappingRule/{rule}. + display_name (str): + Optional. A human readable name + state (google.cloud.clouddms_v1.types.MappingRule.State): + Optional. The mapping rule state + rule_scope (google.cloud.clouddms_v1.types.DatabaseEntityType): + Required. The rule scope + filter (google.cloud.clouddms_v1.types.MappingRuleFilter): + Required. The rule filter + rule_order (int): + Required. The order in which the rule is + applied. Lower order rules are applied before + higher value rules so they may end up being + overridden. + revision_id (str): + Output only. The revision ID of the mapping + rule. A new revision is committed whenever the + mapping rule is changed in any way. The format + is an 8-character hexadecimal string. + revision_create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp that the revision + was created. + single_entity_rename (google.cloud.clouddms_v1.types.SingleEntityRename): + Optional. Rule to specify how a single entity + should be renamed. + + This field is a member of `oneof`_ ``details``. + multi_entity_rename (google.cloud.clouddms_v1.types.MultiEntityRename): + Optional. Rule to specify how multiple + entities should be renamed. + + This field is a member of `oneof`_ ``details``. + entity_move (google.cloud.clouddms_v1.types.EntityMove): + Optional. 
Rule to specify how multiple + entities should be relocated into a different + schema. + + This field is a member of `oneof`_ ``details``. + single_column_change (google.cloud.clouddms_v1.types.SingleColumnChange): + Optional. Rule to specify how a single column + is converted. + + This field is a member of `oneof`_ ``details``. + multi_column_data_type_change (google.cloud.clouddms_v1.types.MultiColumnDatatypeChange): + Optional. Rule to specify how multiple + columns should be converted to a different data + type. + + This field is a member of `oneof`_ ``details``. + conditional_column_set_value (google.cloud.clouddms_v1.types.ConditionalColumnSetValue): + Optional. Rule to specify how the data + contained in a column should be transformed + (such as trimmed, rounded, etc) provided that + the data meets certain criteria. + + This field is a member of `oneof`_ ``details``. + convert_rowid_column (google.cloud.clouddms_v1.types.ConvertRowIdToColumn): + Optional. Rule to specify how multiple tables + should be converted with an additional rowid + column. + + This field is a member of `oneof`_ ``details``. + set_table_primary_key (google.cloud.clouddms_v1.types.SetTablePrimaryKey): + Optional. Rule to specify the primary key for + a table + + This field is a member of `oneof`_ ``details``. + single_package_change (google.cloud.clouddms_v1.types.SinglePackageChange): + Optional. Rule to specify how a single + package is converted. + + This field is a member of `oneof`_ ``details``. + source_sql_change (google.cloud.clouddms_v1.types.SourceSqlChange): + Optional. Rule to change the sql code for an + entity, for example, function, procedure. + + This field is a member of `oneof`_ ``details``. + filter_table_columns (google.cloud.clouddms_v1.types.FilterTableColumns): + Optional. Rule to specify the list of columns + to include or exclude from a table. + + This field is a member of `oneof`_ ``details``. 
+ """ + class State(proto.Enum): + r"""The current mapping rule state such as enabled, disabled or + deleted. + + Values: + STATE_UNSPECIFIED (0): + The state of the mapping rule is unknown. + ENABLED (1): + The rule is enabled. + DISABLED (2): + The rule is disabled. + DELETED (3): + The rule is logically deleted. + """ + STATE_UNSPECIFIED = 0 + ENABLED = 1 + DISABLED = 2 + DELETED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + rule_scope: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + filter: 'MappingRuleFilter' = proto.Field( + proto.MESSAGE, + number=5, + message='MappingRuleFilter', + ) + rule_order: int = proto.Field( + proto.INT64, + number=6, + ) + revision_id: str = proto.Field( + proto.STRING, + number=7, + ) + revision_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + single_entity_rename: 'SingleEntityRename' = proto.Field( + proto.MESSAGE, + number=102, + oneof='details', + message='SingleEntityRename', + ) + multi_entity_rename: 'MultiEntityRename' = proto.Field( + proto.MESSAGE, + number=103, + oneof='details', + message='MultiEntityRename', + ) + entity_move: 'EntityMove' = proto.Field( + proto.MESSAGE, + number=105, + oneof='details', + message='EntityMove', + ) + single_column_change: 'SingleColumnChange' = proto.Field( + proto.MESSAGE, + number=106, + oneof='details', + message='SingleColumnChange', + ) + multi_column_data_type_change: 'MultiColumnDatatypeChange' = proto.Field( + proto.MESSAGE, + number=107, + oneof='details', + message='MultiColumnDatatypeChange', + ) + conditional_column_set_value: 'ConditionalColumnSetValue' = proto.Field( + proto.MESSAGE, + number=108, + oneof='details', + message='ConditionalColumnSetValue', + ) + convert_rowid_column: 
'ConvertRowIdToColumn' = proto.Field( + proto.MESSAGE, + number=114, + oneof='details', + message='ConvertRowIdToColumn', + ) + set_table_primary_key: 'SetTablePrimaryKey' = proto.Field( + proto.MESSAGE, + number=115, + oneof='details', + message='SetTablePrimaryKey', + ) + single_package_change: 'SinglePackageChange' = proto.Field( + proto.MESSAGE, + number=116, + oneof='details', + message='SinglePackageChange', + ) + source_sql_change: 'SourceSqlChange' = proto.Field( + proto.MESSAGE, + number=117, + oneof='details', + message='SourceSqlChange', + ) + filter_table_columns: 'FilterTableColumns' = proto.Field( + proto.MESSAGE, + number=118, + oneof='details', + message='FilterTableColumns', + ) + + +class SingleEntityRename(proto.Message): + r"""Options to configure rule type SingleEntityRename. + The rule is used to rename an entity. + + The rule filter field can refer to only one entity. + + The rule scope can be one of: Database, Schema, Table, Column, + Constraint, Index, View, Function, Stored Procedure, + Materialized View, Sequence, UDT, Synonym + + Attributes: + new_name (str): + Required. The new name of the destination + entity + """ + + new_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MultiEntityRename(proto.Message): + r"""Options to configure rule type MultiEntityRename. + The rule is used to rename multiple entities. + + The rule filter field can refer to one or more entities. + + The rule scope can be one of: Database, Schema, Table, Column, + Constraint, Index, View, Function, Stored Procedure, + Materialized View, Sequence, UDT + + Attributes: + new_name_pattern (str): + Optional. The pattern used to generate the new entity's + name. This pattern must include the characters '{name}', + which will be replaced with the name of the original entity. + For example, the pattern 't_{name}' for an entity name jobs + would be converted to 't_jobs'. 
+ + If unspecified, the default value for this field is '{name}' + source_name_transformation (google.cloud.clouddms_v1.types.EntityNameTransformation): + Optional. Additional transformation that can be done on the + source entity name before it is being used by the + new_name_pattern, for example lower case. If no + transformation is desired, use NO_TRANSFORMATION + """ + + new_name_pattern: str = proto.Field( + proto.STRING, + number=1, + ) + source_name_transformation: 'EntityNameTransformation' = proto.Field( + proto.ENUM, + number=2, + enum='EntityNameTransformation', + ) + + +class EntityMove(proto.Message): + r"""Options to configure rule type EntityMove. + The rule is used to move an entity to a new schema. + + The rule filter field can refer to one or more entities. + + The rule scope can be one of: Table, Column, Constraint, Index, + View, Function, Stored Procedure, Materialized View, Sequence, + UDT + + Attributes: + new_schema (str): + Required. The new schema + """ + + new_schema: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SingleColumnChange(proto.Message): + r"""Options to configure rule type SingleColumnChange. + The rule is used to change the properties of a column. + + The rule filter field can refer to one entity. + + The rule scope can be one of: Column. + + When using this rule, if a field is not specified then the + destination column's configuration will be the same as the one + in the source column. + + Attributes: + data_type (str): + Optional. Column data type name. + charset (str): + Optional. Charset override - instead of table + level charset. + collation (str): + Optional. Collation override - instead of + table level collation. + length (int): + Optional. Column length - e.g. 50 as in + varchar (50) - when relevant. + precision (int): + Optional. Column precision - e.g. 8 as in + double (8,2) - when relevant. + scale (int): + Optional. Column scale - e.g. 2 as in double + (8,2) - when relevant. 
+ fractional_seconds_precision (int): + Optional. Column fractional seconds precision + - e.g. 2 as in timestamp (2) + - when relevant. + array (bool): + Optional. Is the column of array type. + array_length (int): + Optional. The length of the array, only + relevant if the column type is an array. + nullable (bool): + Optional. Is the column nullable. + auto_generated (bool): + Optional. Is the column + auto-generated/identity. + udt (bool): + Optional. Is the column a UDT (User-defined + Type). + custom_features (google.protobuf.struct_pb2.Struct): + Optional. Custom engine specific features. + set_values (MutableSequence[str]): + Optional. Specifies the list of values + allowed in the column. + comment (str): + Optional. Comment associated with the column. + """ + + data_type: str = proto.Field( + proto.STRING, + number=1, + ) + charset: str = proto.Field( + proto.STRING, + number=2, + ) + collation: str = proto.Field( + proto.STRING, + number=3, + ) + length: int = proto.Field( + proto.INT64, + number=4, + ) + precision: int = proto.Field( + proto.INT32, + number=5, + ) + scale: int = proto.Field( + proto.INT32, + number=6, + ) + fractional_seconds_precision: int = proto.Field( + proto.INT32, + number=7, + ) + array: bool = proto.Field( + proto.BOOL, + number=8, + ) + array_length: int = proto.Field( + proto.INT32, + number=9, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=10, + ) + auto_generated: bool = proto.Field( + proto.BOOL, + number=11, + ) + udt: bool = proto.Field( + proto.BOOL, + number=12, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=13, + message=struct_pb2.Struct, + ) + set_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + comment: str = proto.Field( + proto.STRING, + number=15, + ) + + +class MultiColumnDatatypeChange(proto.Message): + r"""Options to configure rule type MultiColumnDatatypeChange. 
+ The rule is used to change the data type and associated + properties of multiple columns at once. + + The rule filter field can refer to one or more entities. + + The rule scope can be one of:Column. + + This rule requires additional filters to be specified beyond the + basic rule filter field, which is the source data type, but the + rule supports additional filtering capabilities such as the + minimum and maximum field length. All additional filters which + are specified are required to be met in order for the rule to be + applied (logical AND between the fields). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_data_type_filter (str): + Required. Filter on source data type. + source_text_filter (google.cloud.clouddms_v1.types.SourceTextFilter): + Optional. Filter for text-based data types + like varchar. + + This field is a member of `oneof`_ ``source_filter``. + source_numeric_filter (google.cloud.clouddms_v1.types.SourceNumericFilter): + Optional. Filter for fixed point number data + types such as NUMERIC/NUMBER. + + This field is a member of `oneof`_ ``source_filter``. + new_data_type (str): + Required. New data type. + override_length (int): + Optional. Column length - e.g. varchar (50) - + if not specified and relevant uses the source + column length. + override_scale (int): + Optional. Column scale - when relevant - if + not specified and relevant uses the source + column scale. + override_precision (int): + Optional. Column precision - when relevant - + if not specified and relevant uses the source + column precision. + override_fractional_seconds_precision (int): + Optional. 
Column fractional seconds precision + - used only for timestamp based datatypes - if + not specified and relevant uses the source + column fractional seconds precision. + custom_features (google.protobuf.struct_pb2.Struct): + Optional. Custom engine specific features. + """ + + source_data_type_filter: str = proto.Field( + proto.STRING, + number=1, + ) + source_text_filter: 'SourceTextFilter' = proto.Field( + proto.MESSAGE, + number=100, + oneof='source_filter', + message='SourceTextFilter', + ) + source_numeric_filter: 'SourceNumericFilter' = proto.Field( + proto.MESSAGE, + number=101, + oneof='source_filter', + message='SourceNumericFilter', + ) + new_data_type: str = proto.Field( + proto.STRING, + number=2, + ) + override_length: int = proto.Field( + proto.INT64, + number=3, + ) + override_scale: int = proto.Field( + proto.INT32, + number=4, + ) + override_precision: int = proto.Field( + proto.INT32, + number=5, + ) + override_fractional_seconds_precision: int = proto.Field( + proto.INT32, + number=6, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Struct, + ) + + +class SourceTextFilter(proto.Message): + r"""Filter for text-based data types like varchar. + + Attributes: + source_min_length_filter (int): + Optional. The filter will match columns with + length greater than or equal to this number. + source_max_length_filter (int): + Optional. The filter will match columns with + length smaller than or equal to this number. + """ + + source_min_length_filter: int = proto.Field( + proto.INT64, + number=1, + ) + source_max_length_filter: int = proto.Field( + proto.INT64, + number=2, + ) + + +class SourceNumericFilter(proto.Message): + r"""Filter for fixed point number data types such as + NUMERIC/NUMBER + + Attributes: + source_min_scale_filter (int): + Optional. The filter will match columns with + scale greater than or equal to this number. + source_max_scale_filter (int): + Optional. 
The filter will match columns with + scale smaller than or equal to this number. + source_min_precision_filter (int): + Optional. The filter will match columns with + precision greater than or equal to this number. + source_max_precision_filter (int): + Optional. The filter will match columns with + precision smaller than or equal to this number. + numeric_filter_option (google.cloud.clouddms_v1.types.NumericFilterOption): + Required. Enum to set the option defining the + datatypes numeric filter has to be applied to + """ + + source_min_scale_filter: int = proto.Field( + proto.INT32, + number=1, + ) + source_max_scale_filter: int = proto.Field( + proto.INT32, + number=2, + ) + source_min_precision_filter: int = proto.Field( + proto.INT32, + number=3, + ) + source_max_precision_filter: int = proto.Field( + proto.INT32, + number=4, + ) + numeric_filter_option: 'NumericFilterOption' = proto.Field( + proto.ENUM, + number=5, + enum='NumericFilterOption', + ) + + +class ConditionalColumnSetValue(proto.Message): + r"""Options to configure rule type ConditionalColumnSetValue. + The rule is used to transform the data which is being + replicated/migrated. + + The rule filter field can refer to one or more entities. + + The rule scope can be one of: Column. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_text_filter (google.cloud.clouddms_v1.types.SourceTextFilter): + Optional. Optional filter on source column + length. Used for text based data types like + varchar. + + This field is a member of `oneof`_ ``source_filter``. + source_numeric_filter (google.cloud.clouddms_v1.types.SourceNumericFilter): + Optional. Optional filter on source column + precision and scale. 
Used for fixed point + numbers such as NUMERIC/NUMBER data types. + + This field is a member of `oneof`_ ``source_filter``. + value_transformation (google.cloud.clouddms_v1.types.ValueTransformation): + Required. Description of data transformation + during migration. + custom_features (google.protobuf.struct_pb2.Struct): + Optional. Custom engine specific features. + """ + + source_text_filter: 'SourceTextFilter' = proto.Field( + proto.MESSAGE, + number=100, + oneof='source_filter', + message='SourceTextFilter', + ) + source_numeric_filter: 'SourceNumericFilter' = proto.Field( + proto.MESSAGE, + number=101, + oneof='source_filter', + message='SourceNumericFilter', + ) + value_transformation: 'ValueTransformation' = proto.Field( + proto.MESSAGE, + number=1, + message='ValueTransformation', + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class ValueTransformation(proto.Message): + r"""Description of data transformation during migration as part + of the ConditionalColumnSetValue. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + is_null (google.protobuf.empty_pb2.Empty): + Optional. Value is null + + This field is a member of `oneof`_ ``filter``. + value_list (google.cloud.clouddms_v1.types.ValueListFilter): + Optional. Value is found in the specified + list. + + This field is a member of `oneof`_ ``filter``. + int_comparison (google.cloud.clouddms_v1.types.IntComparisonFilter): + Optional. Filter on relation between source + value and compare value of type integer. + + This field is a member of `oneof`_ ``filter``. + double_comparison (google.cloud.clouddms_v1.types.DoubleComparisonFilter): + Optional. 
Filter on relation between source + value and compare value of type double. + + This field is a member of `oneof`_ ``filter``. + assign_null (google.protobuf.empty_pb2.Empty): + Optional. Set to null + + This field is a member of `oneof`_ ``action``. + assign_specific_value (google.cloud.clouddms_v1.types.AssignSpecificValue): + Optional. Set to a specific value (value is + converted to fit the target data type) + + This field is a member of `oneof`_ ``action``. + assign_min_value (google.protobuf.empty_pb2.Empty): + Optional. Set to min_value - if integer or numeric, will use + int.minvalue, etc + + This field is a member of `oneof`_ ``action``. + assign_max_value (google.protobuf.empty_pb2.Empty): + Optional. Set to max_value - if integer or numeric, will use + int.maxvalue, etc + + This field is a member of `oneof`_ ``action``. + round_scale (google.cloud.clouddms_v1.types.RoundToScale): + Optional. Allows the data to change scale + + This field is a member of `oneof`_ ``action``. + apply_hash (google.cloud.clouddms_v1.types.ApplyHash): + Optional. Applies a hash function on the data + + This field is a member of `oneof`_ ``action``. 
+ """ + + is_null: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=100, + oneof='filter', + message=empty_pb2.Empty, + ) + value_list: 'ValueListFilter' = proto.Field( + proto.MESSAGE, + number=101, + oneof='filter', + message='ValueListFilter', + ) + int_comparison: 'IntComparisonFilter' = proto.Field( + proto.MESSAGE, + number=102, + oneof='filter', + message='IntComparisonFilter', + ) + double_comparison: 'DoubleComparisonFilter' = proto.Field( + proto.MESSAGE, + number=103, + oneof='filter', + message='DoubleComparisonFilter', + ) + assign_null: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=200, + oneof='action', + message=empty_pb2.Empty, + ) + assign_specific_value: 'AssignSpecificValue' = proto.Field( + proto.MESSAGE, + number=201, + oneof='action', + message='AssignSpecificValue', + ) + assign_min_value: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=202, + oneof='action', + message=empty_pb2.Empty, + ) + assign_max_value: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=203, + oneof='action', + message=empty_pb2.Empty, + ) + round_scale: 'RoundToScale' = proto.Field( + proto.MESSAGE, + number=204, + oneof='action', + message='RoundToScale', + ) + apply_hash: 'ApplyHash' = proto.Field( + proto.MESSAGE, + number=205, + oneof='action', + message='ApplyHash', + ) + + +class ConvertRowIdToColumn(proto.Message): + r"""Options to configure rule type ConvertROWIDToColumn. + The rule is used to add column rowid to destination tables based + on an Oracle rowid function/property. + + The rule filter field can refer to one or more entities. + + The rule scope can be one of: Table. + + This rule requires additional filter to be specified beyond the + basic rule filter field, which is whether or not to work on + tables which already have a primary key defined. + + Attributes: + only_if_no_primary_key (bool): + Required. 
Only work on tables without primary + key defined + """ + + only_if_no_primary_key: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class SetTablePrimaryKey(proto.Message): + r"""Options to configure rule type SetTablePrimaryKey. + The rule is used to specify the columns and name to + configure/alter the primary key of a table. + + The rule filter field can refer to one entity. + + The rule scope can be one of: Table. + + Attributes: + primary_key_columns (MutableSequence[str]): + Required. List of column names for the + primary key + primary_key (str): + Optional. Name for the primary key + """ + + primary_key_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + primary_key: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SinglePackageChange(proto.Message): + r"""Options to configure rule type SinglePackageChange. + The rule is used to alter the sql code for a package entities. + + The rule filter field can refer to one entity. + + The rule scope can be: Package + + Attributes: + package_description (str): + Optional. Sql code for package description + package_body (str): + Optional. Sql code for package body + """ + + package_description: str = proto.Field( + proto.STRING, + number=1, + ) + package_body: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SourceSqlChange(proto.Message): + r"""Options to configure rule type SourceSqlChange. + The rule is used to alter the sql code for database entities. + + The rule filter field can refer to one entity. + + The rule scope can be: StoredProcedure, Function, Trigger, View + + Attributes: + sql_code (str): + Required. Sql code for source (stored + procedure, function, trigger or view) + """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FilterTableColumns(proto.Message): + r"""Options to configure rule type FilterTableColumns. + The rule is used to filter the list of columns to include or + exclude from a table. 
+ + The rule filter field can refer to one entity. + + The rule scope can be: Table + + Only one of the two lists can be specified for the rule. + + Attributes: + include_columns (MutableSequence[str]): + Optional. List of columns to be included for + a particular table. + exclude_columns (MutableSequence[str]): + Optional. List of columns to be excluded for + a particular table. + """ + + include_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + exclude_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class ValueListFilter(proto.Message): + r"""A list of values to filter by in ConditionalColumnSetValue + + Attributes: + value_present_list (google.cloud.clouddms_v1.types.ValuePresentInList): + Required. Indicates whether the filter + matches rows with values that are present in the + list or those with values not present in it. + values (MutableSequence[str]): + Required. The list to be used to filter by + ignore_case (bool): + Required. Whether to ignore case when + filtering by values. Defaults to false + """ + + value_present_list: 'ValuePresentInList' = proto.Field( + proto.ENUM, + number=1, + enum='ValuePresentInList', + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + ignore_case: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class IntComparisonFilter(proto.Message): + r"""Filter based on relation between source value and compare + value of type integer in ConditionalColumnSetValue + + Attributes: + value_comparison (google.cloud.clouddms_v1.types.ValueComparison): + Required. Relation between source value and + compare value + value (int): + Required. 
Integer compare value to be used + """ + + value_comparison: 'ValueComparison' = proto.Field( + proto.ENUM, + number=1, + enum='ValueComparison', + ) + value: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DoubleComparisonFilter(proto.Message): + r"""Filter based on relation between source + value and compare value of type double in + ConditionalColumnSetValue + + Attributes: + value_comparison (google.cloud.clouddms_v1.types.ValueComparison): + Required. Relation between source value and + compare value + value (float): + Required. Double compare value to be used + """ + + value_comparison: 'ValueComparison' = proto.Field( + proto.ENUM, + number=1, + enum='ValueComparison', + ) + value: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + +class AssignSpecificValue(proto.Message): + r"""Set to a specific value (value is converted to fit the target + data type) + + Attributes: + value (str): + Required. Specific value to be assigned + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ApplyHash(proto.Message): + r"""Apply a hash function on the value. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uuid_from_bytes (google.protobuf.empty_pb2.Empty): + Optional. Generate UUID from the data's byte + array + + This field is a member of `oneof`_ ``hash_function``. + """ + + uuid_from_bytes: empty_pb2.Empty = proto.Field( + proto.MESSAGE, + number=100, + oneof='hash_function', + message=empty_pb2.Empty, + ) + + +class RoundToScale(proto.Message): + r"""This allows the data to change scale, for example if the + source is 2 digits after the decimal point, specify round to + scale value = 2. If for example the value needs to be converted + to an integer, use round to scale value = 0. + + Attributes: + scale (int): + Required. 
Scale value to be used + """ + + scale: int = proto.Field( + proto.INT32, + number=1, + ) + + +class DatabaseEntity(proto.Message): + r"""The base entity type for all the database related entities. + The message contains the entity name, the name of its parent, + the entity type, and the specific details per entity type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + short_name (str): + The short name (e.g. table name) of the + entity. + parent_entity (str): + The full name of the parent entity (e.g. + schema name). + tree (google.cloud.clouddms_v1.types.DatabaseEntity.TreeType): + The type of tree the entity belongs to. + entity_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The type of the database entity (table, view, + index, ...). + mappings (MutableSequence[google.cloud.clouddms_v1.types.EntityMapping]): + Details about entity mappings. + For source tree entities, this holds the draft + entities which were generated by the mapping + rules. + For draft tree entities, this holds the source + entities which were converted to form the draft + entity. + Destination entities will have no mapping + details. + entity_ddl (MutableSequence[google.cloud.clouddms_v1.types.EntityDdl]): + Details about the entity DDL script. Multiple + DDL scripts are provided for child entities such + as a table entity will have one DDL for the + table with additional DDLs for each index, + constraint and such. + issues (MutableSequence[google.cloud.clouddms_v1.types.EntityIssue]): + Details about the various issues found for + the entity. + database (google.cloud.clouddms_v1.types.DatabaseInstanceEntity): + Database. + + This field is a member of `oneof`_ ``entity_body``. 
+ schema (google.cloud.clouddms_v1.types.SchemaEntity): + Schema. + + This field is a member of `oneof`_ ``entity_body``. + table (google.cloud.clouddms_v1.types.TableEntity): + Table. + + This field is a member of `oneof`_ ``entity_body``. + view (google.cloud.clouddms_v1.types.ViewEntity): + View. + + This field is a member of `oneof`_ ``entity_body``. + sequence (google.cloud.clouddms_v1.types.SequenceEntity): + Sequence. + + This field is a member of `oneof`_ ``entity_body``. + stored_procedure (google.cloud.clouddms_v1.types.StoredProcedureEntity): + Stored procedure. + + This field is a member of `oneof`_ ``entity_body``. + database_function (google.cloud.clouddms_v1.types.FunctionEntity): + Function. + + This field is a member of `oneof`_ ``entity_body``. + synonym (google.cloud.clouddms_v1.types.SynonymEntity): + Synonym. + + This field is a member of `oneof`_ ``entity_body``. + database_package (google.cloud.clouddms_v1.types.PackageEntity): + Package. + + This field is a member of `oneof`_ ``entity_body``. + udt (google.cloud.clouddms_v1.types.UDTEntity): + UDT. + + This field is a member of `oneof`_ ``entity_body``. + materialized_view (google.cloud.clouddms_v1.types.MaterializedViewEntity): + Materialized view. + + This field is a member of `oneof`_ ``entity_body``. + """ + class TreeType(proto.Enum): + r"""The type of database entities tree. + + Values: + TREE_TYPE_UNSPECIFIED (0): + Tree type unspecified. + SOURCE (1): + Tree of entities loaded from a source + database. + DRAFT (2): + Tree of entities converted from the source + tree using the mapping rules. + DESTINATION (3): + Tree of entities observed on the destination + database. 
+ """ + TREE_TYPE_UNSPECIFIED = 0 + SOURCE = 1 + DRAFT = 2 + DESTINATION = 3 + + short_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent_entity: str = proto.Field( + proto.STRING, + number=2, + ) + tree: TreeType = proto.Field( + proto.ENUM, + number=3, + enum=TreeType, + ) + entity_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + mappings: MutableSequence['EntityMapping'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='EntityMapping', + ) + entity_ddl: MutableSequence['EntityDdl'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='EntityDdl', + ) + issues: MutableSequence['EntityIssue'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='EntityIssue', + ) + database: 'DatabaseInstanceEntity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='entity_body', + message='DatabaseInstanceEntity', + ) + schema: 'SchemaEntity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='entity_body', + message='SchemaEntity', + ) + table: 'TableEntity' = proto.Field( + proto.MESSAGE, + number=103, + oneof='entity_body', + message='TableEntity', + ) + view: 'ViewEntity' = proto.Field( + proto.MESSAGE, + number=104, + oneof='entity_body', + message='ViewEntity', + ) + sequence: 'SequenceEntity' = proto.Field( + proto.MESSAGE, + number=105, + oneof='entity_body', + message='SequenceEntity', + ) + stored_procedure: 'StoredProcedureEntity' = proto.Field( + proto.MESSAGE, + number=106, + oneof='entity_body', + message='StoredProcedureEntity', + ) + database_function: 'FunctionEntity' = proto.Field( + proto.MESSAGE, + number=107, + oneof='entity_body', + message='FunctionEntity', + ) + synonym: 'SynonymEntity' = proto.Field( + proto.MESSAGE, + number=108, + oneof='entity_body', + message='SynonymEntity', + ) + database_package: 'PackageEntity' = proto.Field( + proto.MESSAGE, + number=109, + oneof='entity_body', + message='PackageEntity', + ) + udt: 'UDTEntity' = proto.Field( 
+ proto.MESSAGE, + number=110, + oneof='entity_body', + message='UDTEntity', + ) + materialized_view: 'MaterializedViewEntity' = proto.Field( + proto.MESSAGE, + number=111, + oneof='entity_body', + message='MaterializedViewEntity', + ) + + +class DatabaseInstanceEntity(proto.Message): + r"""DatabaseInstance acts as a parent entity to other database + entities. + + Attributes: + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class SchemaEntity(proto.Message): + r"""Schema typically has no parent entity, but can have a parent + entity DatabaseInstance (for database engines which support it). + For some database engines, the terms schema and user can be + used interchangeably when they refer to a namespace or a + collection of other database entities. Can store additional + information which is schema specific. + + Attributes: + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class TableEntity(proto.Message): + r"""Table's parent is a schema. + + Attributes: + columns (MutableSequence[google.cloud.clouddms_v1.types.ColumnEntity]): + Table columns. + constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]): + Table constraints. + indices (MutableSequence[google.cloud.clouddms_v1.types.IndexEntity]): + Table indices. + triggers (MutableSequence[google.cloud.clouddms_v1.types.TriggerEntity]): + Table triggers. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + comment (str): + Comment associated with the table. 
+ """ + + columns: MutableSequence['ColumnEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ColumnEntity', + ) + constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ConstraintEntity', + ) + indices: MutableSequence['IndexEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='IndexEntity', + ) + triggers: MutableSequence['TriggerEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='TriggerEntity', + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + comment: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ColumnEntity(proto.Message): + r"""Column is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + Column name. + data_type (str): + Column data type. + charset (str): + Charset override - instead of table level + charset. + collation (str): + Collation override - instead of table level + collation. + length (int): + Column length - e.g. varchar (50). + precision (int): + Column precision - when relevant. + scale (int): + Column scale - when relevant. + fractional_seconds_precision (int): + Column fractional second precision - used for + timestamp based datatypes. + array (bool): + Is the column of array type. + array_length (int): + If the column is array, of which length. + nullable (bool): + Is the column nullable. + auto_generated (bool): + Is the column auto-generated/identity. + udt (bool): + Is the column a UDT. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + set_values (MutableSequence[str]): + Specifies the list of values allowed in the + column. Only used for set data type. + comment (str): + Comment associated with the column. + ordinal_position (int): + Column order in the table. + default_value (str): + Default value of the column. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_type: str = proto.Field( + proto.STRING, + number=2, + ) + charset: str = proto.Field( + proto.STRING, + number=3, + ) + collation: str = proto.Field( + proto.STRING, + number=4, + ) + length: int = proto.Field( + proto.INT64, + number=5, + ) + precision: int = proto.Field( + proto.INT32, + number=6, + ) + scale: int = proto.Field( + proto.INT32, + number=7, + ) + fractional_seconds_precision: int = proto.Field( + proto.INT32, + number=8, + ) + array: bool = proto.Field( + proto.BOOL, + number=9, + ) + array_length: int = proto.Field( + proto.INT32, + number=10, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=11, + ) + auto_generated: bool = proto.Field( + proto.BOOL, + number=12, + ) + udt: bool = proto.Field( + proto.BOOL, + number=13, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=14, + message=struct_pb2.Struct, + ) + set_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + comment: str = proto.Field( + proto.STRING, + number=16, + ) + ordinal_position: int = proto.Field( + proto.INT32, + number=17, + ) + default_value: str = proto.Field( + proto.STRING, + number=18, + ) + + +class ConstraintEntity(proto.Message): + r"""Constraint is not used as an independent entity, it is + retrieved as part of another entity such as Table or View. + + Attributes: + name (str): + The name of the table constraint. + type_ (str): + Type of constraint, for example unique, + primary key, foreign key (currently only primary + key is supported). + table_columns (MutableSequence[str]): + Table columns used as part of the Constraint, + for example primary key constraint should list + the columns which constitutes the key. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + reference_columns (MutableSequence[str]): + Reference columns which may be associated with the + constraint. 
For example, if the constraint is a FOREIGN_KEY, + this represents the list of full names of referenced columns + by the foreign key. + reference_table (str): + Reference table which may be associated with the constraint. + For example, if the constraint is a FOREIGN_KEY, this + represents the list of full name of the referenced table by + the foreign key. + table_name (str): + Table which is associated with the constraint. In case the + constraint is defined on a table, this field is left empty + as this information is stored in parent_name. However, if + constraint is defined on a view, this field stores the table + name on which the view is defined. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + reference_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + reference_table: str = proto.Field( + proto.STRING, + number=6, + ) + table_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class IndexEntity(proto.Message): + r"""Index is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the index. + type_ (str): + Type of index, for example B-TREE. + table_columns (MutableSequence[str]): + Table columns used as part of the Index, for + example B-TREE index should list the columns + which constitutes the index. + unique (bool): + Boolean value indicating whether the index is + unique. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + unique: bool = proto.Field( + proto.BOOL, + number=4, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +class TriggerEntity(proto.Message): + r"""Trigger is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the trigger. + triggering_events (MutableSequence[str]): + The DML, DDL, or database events that fire + the trigger, for example INSERT, UPDATE. + trigger_type (str): + Indicates when the trigger fires, for example + BEFORE STATEMENT, AFTER EACH ROW. + sql_code (str): + The SQL code which creates the trigger. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + triggering_events: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + trigger_type: str = proto.Field( + proto.STRING, + number=3, + ) + sql_code: str = proto.Field( + proto.STRING, + number=4, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +class ViewEntity(proto.Message): + r"""View's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the view. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]): + View constraints. 
+ """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ConstraintEntity', + ) + + +class SequenceEntity(proto.Message): + r"""Sequence's parent is a schema. + + Attributes: + increment (int): + Increment value for the sequence. + start_value (bytes): + Start number for the sequence represented as + bytes to accommodate large. numbers + max_value (bytes): + Maximum number for the sequence represented + as bytes to accommodate large. numbers + min_value (bytes): + Minimum number for the sequence represented + as bytes to accommodate large. numbers + cycle (bool): + Indicates whether the sequence value should + cycle through. + cache (int): + Indicates number of entries to cache / + precreate. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + increment: int = proto.Field( + proto.INT64, + number=1, + ) + start_value: bytes = proto.Field( + proto.BYTES, + number=2, + ) + max_value: bytes = proto.Field( + proto.BYTES, + number=3, + ) + min_value: bytes = proto.Field( + proto.BYTES, + number=4, + ) + cycle: bool = proto.Field( + proto.BOOL, + number=5, + ) + cache: int = proto.Field( + proto.INT64, + number=6, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Struct, + ) + + +class StoredProcedureEntity(proto.Message): + r"""Stored procedure's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the stored + procedure. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
+ """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class FunctionEntity(proto.Message): + r"""Function's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the function. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class MaterializedViewEntity(proto.Message): + r"""MaterializedView's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the view. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class SynonymEntity(proto.Message): + r"""Synonym's parent is a schema. + + Attributes: + source_entity (str): + The name of the entity for which the synonym + is being created (the source). + source_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The type of the entity for which the synonym + is being created (usually a table or a + sequence). + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + source_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=2, + enum='DatabaseEntityType', + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class PackageEntity(proto.Message): + r"""Package's parent is a schema. + + Attributes: + package_sql_code (str): + The SQL code which creates the package. 
+ package_body (str): + The SQL code which creates the package body. + If the package specification has cursors or + subprograms, then the package body is mandatory. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + package_sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + package_body: str = proto.Field( + proto.STRING, + number=2, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class UDTEntity(proto.Message): + r"""UDT's parent is a schema. + + Attributes: + udt_sql_code (str): + The SQL code which creates the udt. + udt_body (str): + The SQL code which creates the udt body. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + udt_sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + udt_body: str = proto.Field( + proto.STRING, + number=2, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class EntityMapping(proto.Message): + r"""Details of the mappings of a database entity. + + Attributes: + source_entity (str): + Source entity full name. + The source entity can also be a column, index or + constraint using the same naming notation + schema.table.column. + draft_entity (str): + Target entity full name. + The draft entity can also include a column, + index or constraint using the same naming + notation schema.table.column. + source_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of source entity. + draft_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of draft entity. + mapping_log (MutableSequence[google.cloud.clouddms_v1.types.EntityMappingLogEntry]): + Entity mapping log entries. + Multiple rules can be effective and contribute + changes to a converted entity, such as a rule + can handle the entity name, another rule can + handle an entity type. 
In addition, rules which + did not change the entity are also logged along + with the reason preventing them to do so. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + draft_entity: str = proto.Field( + proto.STRING, + number=2, + ) + source_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + draft_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=5, + enum='DatabaseEntityType', + ) + mapping_log: MutableSequence['EntityMappingLogEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='EntityMappingLogEntry', + ) + + +class EntityMappingLogEntry(proto.Message): + r"""A single record of a rule which was used for a mapping. + + Attributes: + rule_id (str): + Which rule caused this log entry. + rule_revision_id (str): + Rule revision ID. + mapping_comment (str): + Comment. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + rule_revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + mapping_comment: str = proto.Field( + proto.STRING, + number=3, + ) + + +class EntityDdl(proto.Message): + r"""A single DDL statement for a specific entity + + Attributes: + ddl_type (str): + Type of DDL (Create, Alter). + entity (str): + The name of the database entity the ddl + refers to. + ddl (str): + The actual ddl code. + entity_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The entity type (if the DDL is for a sub + entity). + issue_id (MutableSequence[str]): + EntityIssues found for this ddl. 
+ """ + + ddl_type: str = proto.Field( + proto.STRING, + number=1, + ) + entity: str = proto.Field( + proto.STRING, + number=2, + ) + ddl: str = proto.Field( + proto.STRING, + number=3, + ) + entity_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + issue_id: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=100, + ) + + +class EntityIssue(proto.Message): + r"""Issue related to the entity. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Unique Issue ID. + type_ (google.cloud.clouddms_v1.types.EntityIssue.IssueType): + The type of the issue. + severity (google.cloud.clouddms_v1.types.EntityIssue.IssueSeverity): + Severity of the issue + message (str): + Issue detailed message + code (str): + Error/Warning code + ddl (str): + The ddl which caused the issue, if relevant. + + This field is a member of `oneof`_ ``_ddl``. + position (google.cloud.clouddms_v1.types.EntityIssue.Position): + The position of the issue found, if relevant. + + This field is a member of `oneof`_ ``_position``. + entity_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The entity type (if the DDL is for a sub + entity). + """ + class IssueType(proto.Enum): + r"""Type of issue. + + Values: + ISSUE_TYPE_UNSPECIFIED (0): + Unspecified issue type. + ISSUE_TYPE_DDL (1): + Issue originated from the DDL + ISSUE_TYPE_APPLY (2): + Issue originated during the apply process + ISSUE_TYPE_CONVERT (3): + Issue originated during the convert process + """ + ISSUE_TYPE_UNSPECIFIED = 0 + ISSUE_TYPE_DDL = 1 + ISSUE_TYPE_APPLY = 2 + ISSUE_TYPE_CONVERT = 3 + + class IssueSeverity(proto.Enum): + r"""Severity of issue. 
+ + Values: + ISSUE_SEVERITY_UNSPECIFIED (0): + Unspecified issue severity + ISSUE_SEVERITY_INFO (1): + Info + ISSUE_SEVERITY_WARNING (2): + Warning + ISSUE_SEVERITY_ERROR (3): + Error + """ + ISSUE_SEVERITY_UNSPECIFIED = 0 + ISSUE_SEVERITY_INFO = 1 + ISSUE_SEVERITY_WARNING = 2 + ISSUE_SEVERITY_ERROR = 3 + + class Position(proto.Message): + r"""Issue position. + + Attributes: + line (int): + Issue line number + column (int): + Issue column number + offset (int): + Issue offset + length (int): + Issue length + """ + + line: int = proto.Field( + proto.INT32, + number=1, + ) + column: int = proto.Field( + proto.INT32, + number=2, + ) + offset: int = proto.Field( + proto.INT32, + number=3, + ) + length: int = proto.Field( + proto.INT32, + number=4, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + type_: IssueType = proto.Field( + proto.ENUM, + number=2, + enum=IssueType, + ) + severity: IssueSeverity = proto.Field( + proto.ENUM, + number=3, + enum=IssueSeverity, + ) + message: str = proto.Field( + proto.STRING, + number=4, + ) + code: str = proto.Field( + proto.STRING, + number=5, + ) + ddl: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + position: Position = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=Position, + ) + entity_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=8, + enum='DatabaseEntityType', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dms/v1/mypy.ini b/owl-bot-staging/google-cloud-dms/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dms/v1/noxfile.py b/owl-bot-staging/google-cloud-dms/v1/noxfile.py new file mode 100644 index 000000000000..f984a3b7b953 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/noxfile.py @@ -0,0 +1,280 
@@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-dms' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/clouddms_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/clouddms_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py new file mode 100644 index 000000000000..180674d4a0a5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py new file mode 100644 index 000000000000..12fa2e5f25d9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py new file mode 100644 index 000000000000..8aae0f88340a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CommitConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py new file mode 100644 index 000000000000..4148247ded3a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py new file mode 100644 index 000000000000..a3723039c44a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ConvertConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py new file mode 100644 index 000000000000..eef6936ec89b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ConvertConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py new file mode 100644 index 000000000000..a044ad8d31bd --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py 
b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py new file mode 100644 index 000000000000..a384e0f7b2cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py new file mode 100644 index 000000000000..0418d88486dd --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py new file mode 100644 index 000000000000..4231afa8502e --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py new file mode 100644 index 000000000000..7e63da75a746 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMappingRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + mapping_rule = clouddms_v1.MappingRule() + mapping_rule.single_entity_rename.new_name = "new_name_value" + mapping_rule.rule_scope = "DATABASE_ENTITY_TYPE_DATABASE" + mapping_rule.rule_order = 1075 + + request = clouddms_v1.CreateMappingRuleRequest( + parent="parent_value", + mapping_rule_id="mapping_rule_id_value", + mapping_rule=mapping_rule, + ) + + # Make the request + response = await client.create_mapping_rule(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMappingRule_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py new file mode 100644 index 000000000000..9a268d3e87f6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMappingRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + mapping_rule = clouddms_v1.MappingRule() + mapping_rule.single_entity_rename.new_name = "new_name_value" + mapping_rule.rule_scope = "DATABASE_ENTITY_TYPE_DATABASE" + mapping_rule.rule_order = 1075 + + request = clouddms_v1.CreateMappingRuleRequest( + parent="parent_value", + mapping_rule_id="mapping_rule_id_value", + mapping_rule=mapping_rule, + ) + + # Make the request + response = client.create_mapping_rule(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMappingRule_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py new file mode 100644 index 
000000000000..f229a5a60447 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py new file mode 100644 index 000000000000..d531c2b3f39f --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py new file mode 100644 index 000000000000..27294258cacc --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py new file mode 100644 index 000000000000..3b6e0ce68020 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py new file mode 100644 index 000000000000..dea58f4fe8f2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py new file mode 100644 index 000000000000..95ce9fcef576 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py new file mode 100644 index 000000000000..041c31348908 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py new file mode 100644 index 000000000000..774085f3ec31 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py new file mode 100644 index 000000000000..2987849d2194 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMappingRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMappingRuleRequest( + name="name_value", + ) + + # Make the request + await client.delete_mapping_rule(request=request) + + +# [END datamigration_v1_generated_DataMigrationService_DeleteMappingRule_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py new file mode 100644 index 000000000000..c96b107cac15 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMappingRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMappingRuleRequest( + name="name_value", + ) + + # Make the request + client.delete_mapping_rule(request=request) + + +# [END datamigration_v1_generated_DataMigrationService_DeleteMappingRule_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py new file mode 100644 index 000000000000..6341a4554b30 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py new file mode 100644 index 000000000000..198d850c49fe --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py new file mode 100644 index 000000000000..694c1716f357 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py new file mode 100644 index 000000000000..8aac124cd53b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeletePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py new file mode 100644 index 000000000000..be245b6e6fde --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 
2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py new file mode 100644 index 000000000000..c8e8865866d6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py new file mode 100644 index 000000000000..ea2550249bda --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeDatabaseEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + tree="DESTINATION_TREE", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py new file mode 100644 index 000000000000..999f45930899 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DescribeDatabaseEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + tree="DESTINATION_TREE", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py new file mode 100644 index 000000000000..0835a61f1d23 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchStaticIps +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py new file mode 100644 index 000000000000..e5bba8997e95 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchStaticIps +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py new file mode 100644 index 000000000000..2feba166b27c --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateSshScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = await client.generate_ssh_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py new file mode 100644 index 000000000000..520ff135704a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateSshScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = client.generate_ssh_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py new file mode 100644 index 000000000000..22fd531281f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateTcpProxyScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_generate_tcp_proxy_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GenerateTcpProxyScriptRequest( + vm_name="vm_name_value", + vm_machine_type="vm_machine_type_value", + vm_subnet="vm_subnet_value", + ) + + # Make the request + response = await client.generate_tcp_proxy_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py new file mode 100644 index 000000000000..af2b917c1a28 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateTcpProxyScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_generate_tcp_proxy_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GenerateTcpProxyScriptRequest( + vm_name="vm_name_value", + vm_machine_type="vm_machine_type_value", + vm_subnet="vm_subnet_value", + ) + + # Make the request + response = client.generate_tcp_proxy_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py new file mode 100644 index 000000000000..5feebd1faeeb --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection_profile(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py new file mode 100644 index 000000000000..f437f91bfea3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection_profile(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py new file mode 100644 index 000000000000..bbfde57c532c --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py new file mode 100644 index 000000000000..c4df1c1159ae --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py new file mode 100644 index 000000000000..adaea0633ba6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMappingRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMappingRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mapping_rule(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMappingRule_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py new file mode 100644 index 000000000000..890e94ca3038 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMappingRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMappingRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_mapping_rule(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMappingRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_mapping_rule(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMappingRule_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py new file mode 100644 index 000000000000..0d5dbde56993 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_job(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py new file mode 100644 index 000000000000..5f5161d93e97 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_job(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py new file mode 100644 index 000000000000..ed7e97b80718 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py new file mode 100644 index 000000000000..85083c9afba1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py new file mode 100644 index 000000000000..ae19af6c241e --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + rules_files = clouddms_v1.RulesFile() + rules_files.rules_source_filename = "rules_source_filename_value" + rules_files.rules_content = "rules_content_value" + + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + rules_format="IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE", + rules_files=rules_files, + auto_commit=True, + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py new file mode 100644 index 000000000000..7af80d1ee953 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + rules_files = clouddms_v1.RulesFile() + rules_files.rules_source_filename = "rules_source_filename_value" + rules_files.rules_content = "rules_content_value" + + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + rules_format="IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE", + rules_files=rules_files, + auto_commit=True, + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py new file mode 100644 index 000000000000..75f50381d47b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnectionProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py new file mode 100644 index 000000000000..428fa19ed8e1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListConnectionProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py new file mode 100644 index 000000000000..55eaa4dc65dd --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py new file mode 100644 index 000000000000..f757ac000993 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py new file mode 100644 index 000000000000..e504cb28cf41 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMappingRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mapping_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMappingRules_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py new file mode 100644 index 000000000000..daaa244c302e --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMappingRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mapping_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMappingRules_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py new file mode 100644 index 000000000000..bb10f90b7bac --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py new file mode 100644 index 000000000000..d6815ceca462 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py new file mode 100644 index 000000000000..e342235f3c2a --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py new file mode 100644 index 000000000000..0db73f218f7b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py new file mode 100644 index 000000000000..2c8a306d0e30 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PromoteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py new file mode 100644 index 000000000000..93160785d0c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for PromoteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py new file mode 100644 index 000000000000..f72071784c43 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py new file mode 100644 index 000000000000..e87e838e4c45 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RestartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py new file mode 100644 index 000000000000..35db9d1dd80d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ResumeMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py new file mode 100644 index 000000000000..bc8bcbe9fbfd --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ResumeMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py new file mode 100644 index 000000000000..674506f305db --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py new file mode 100644 index 000000000000..07783464ff67 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py new file mode 100644 index 000000000000..8e76cbe7b556 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py new file mode 100644 index 000000000000..e895e1fb628b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py new file mode 100644 index 000000000000..758caeeff645 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py new file mode 100644 index 000000000000..ff7361b2d747 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py new file mode 100644 index 000000000000..f7aee09b4434 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py new file mode 100644 index 000000000000..a79560f3fa56 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for StartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py new file mode 100644 index 000000000000..f6a8185ebaa0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py new file mode 100644 index 000000000000..8727c2fa669f --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for StopMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py new file mode 100644 index 000000000000..fdfd7329d1ba --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py new file mode 100644 index 000000000000..fae3c2959f4f --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py new file mode 100644 index 000000000000..e1bbc1de646b --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py new file mode 100644 index 000000000000..6f0c30285de6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py new file mode 100644 index 000000000000..ae737d3853a6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py new file mode 100644 index 000000000000..e325600f11e9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py new file mode 100644 index 000000000000..b779a1e0465e --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for VerifyMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py new file mode 100644 index 000000000000..abc3e3832f1f --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for VerifyMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] diff --git a/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json new file mode 100644 index 000000000000..92f5517525f3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -0,0 +1,6578 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.clouddms.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dms", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.apply_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ApplyConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "apply_conversion_workspace" + }, + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.apply_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ApplyConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "apply_conversion_workspace" + }, + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.commit_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", + 
"service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CommitConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "commit_conversion_workspace" + }, + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.commit_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CommitConversionWorkspace" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "commit_conversion_workspace" + }, + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.convert_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ConvertConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "convert_conversion_workspace" + }, + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.convert_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ConvertConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"convert_conversion_workspace" + }, + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "create_connection_profile" + }, + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "create_connection_profile" + }, + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], 
+ "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + 
} + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMappingRuleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mapping_rule", + "type": "google.cloud.clouddms_v1.types.MappingRule" + }, + { + "name": "mapping_rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.clouddms_v1.types.MappingRule", + "shortName": "create_mapping_rule" + }, + "description": "Sample for CreateMappingRule", + "file": "datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMappingRule_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_mapping_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMappingRuleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mapping_rule", + "type": "google.cloud.clouddms_v1.types.MappingRule" + }, + { + "name": "mapping_rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MappingRule", + "shortName": "create_mapping_rule" + }, + 
"description": "Sample for CreateMappingRule", + "file": "datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMappingRule_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_mapping_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + 
"file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": 
"datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": 
"datamigration_v1_generated_data_migration_service_create_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": 
"datamigration_v1_generated_data_migration_service_create_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], 
+ "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMappingRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_mapping_rule" + }, + "description": "Sample for DeleteMappingRule", + "file": "datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMappingRule_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_mapping_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.delete_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMappingRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_mapping_rule" + }, + "description": "Sample for DeleteMappingRule", + "file": "datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMappingRule_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_mapping_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + 
"shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": 
"datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": 
"datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": 
"datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync", + "segments": [ + { 
+ "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": 
"DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_tcp_proxy_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateTcpProxyScript", + "service": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateTcpProxyScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateTcpProxyScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.TcpProxyScript", + "shortName": "generate_tcp_proxy_script" + }, + "description": "Sample for GenerateTcpProxyScript", + "file": "datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_tcp_proxy_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateTcpProxyScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateTcpProxyScript" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.GenerateTcpProxyScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.TcpProxyScript", + "shortName": "generate_tcp_proxy_script" + }, + "description": "Sample for GenerateTcpProxyScript", + "file": "datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateTcpProxyScript_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_tcp_proxy_script_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, 
+ "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMappingRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MappingRule", + "shortName": "get_mapping_rule" + }, + "description": "Sample for GetMappingRule", + "file": "datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMappingRule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_mapping_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_mapping_rule", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMappingRule", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMappingRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMappingRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MappingRule", + "shortName": "get_mapping_rule" + }, + "description": "Sample for GetMappingRule", + "file": "datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMappingRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_mapping_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + 
"method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + 
"shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMappingRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMappingRulesAsyncPager", + "shortName": "list_mapping_rules" + }, + "description": "Sample for ListMappingRules", + "file": "datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMappingRules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_mapping_rules_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMappingRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMappingRulesPager", + "shortName": "list_mapping_rules" + }, + "description": "Sample for ListMappingRules", + "file": "datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMappingRules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_mapping_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + 
"fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListMigrationJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", + "shortName": "list_migration_jobs" + }, + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", 
+ "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListMigrationJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", + "shortName": "list_migration_jobs" + }, + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_private_connections", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": 
"ListPrivateConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager", + "shortName": "list_private_connections" + }, + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_private_connections", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListPrivateConnections" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager", + "shortName": "list_private_connections" + }, + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "PromoteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_migration_job" + }, + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "PromoteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_migration_job" + }, + 
"description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RestartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_migration_job" + }, + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RestartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_migration_job" + }, + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, 
+ { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ResumeMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_migration_job" + }, + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ResumeMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_migration_job" + }, + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": 
"DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.rollback_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RollbackConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "rollback_conversion_workspace" + }, + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.rollback_conversion_workspace", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RollbackConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "rollback_conversion_workspace" + }, + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.search_background_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": 
"DataMigrationService" + }, + "shortName": "SearchBackgroundJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" + }, + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.search_background_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SearchBackgroundJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" + }, + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.seed_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SeedConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "seed_conversion_workspace" + }, + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.seed_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SeedConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "seed_conversion_workspace" + }, + "description": "Sample for SeedConversionWorkspace", + "file": 
"datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.start_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_migration_job" + }, + "description": "Sample for StartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_async", + 
"segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.start_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_migration_job" + }, + "description": "Sample for StartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_start_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + 
"start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_start_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.stop_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StopMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_migration_job" + }, + "description": "Sample for StopMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.stop_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StopMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "stop_migration_job" + }, + "description": "Sample for StopMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_connection_profile", + "method": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connection_profile" + }, + "description": "Sample for UpdateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_connection_profile", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connection_profile" + }, + "description": "Sample for UpdateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_conversion_workspace", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_conversion_workspace", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_migration_job", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_migration_job" + }, + "description": "Sample for UpdateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", + "service": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_migration_job" + }, + "description": "Sample for UpdateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.verify_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + 
"shortName": "DataMigrationService" + }, + "shortName": "VerifyMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "verify_migration_job" + }, + "description": "Sample for VerifyMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.verify_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "VerifyMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + 
{ + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "verify_migration_job" + }, + "description": "Sample for VerifyMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-dms/v1/scripts/fixup_clouddms_v1_keywords.py b/owl-bot-staging/google-cloud-dms/v1/scripts/fixup_clouddms_v1_keywords.py new file mode 100644 index 000000000000..de3cd296fefd --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/scripts/fixup_clouddms_v1_keywords.py @@ -0,0 +1,216 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# Rewrites "flattened" positional/keyword calls against the generated
# clouddms client into the canonical single-``request``-dict calling form.
import argparse
import os
import pathlib
import sys
from typing import Any, Callable, Dict, List, Sequence, Tuple

import libcst as cst


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any],
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Args:
        predicate: Decides which output list each element belongs to.
        iterator: Elements to split.

    Returns:
        ``(trueList, falseList)`` — elements for which ``predicate`` is
        truthy, then the rest, each preserving input order.
    """
    results = ([], [])

    for i in iterator:
        # int(bool) is 0 (falsy) or 1 (truthy), selecting the bucket.
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class clouddmsCallTransformer(cst.CSTTransformer):
    """libcst transformer that un-flattens client method calls.

    Positional and request-field keyword arguments are folded into a single
    ``request={...}`` dict argument; control-plane arguments (retry, timeout,
    metadata) are preserved as keywords.
    """
    # Control-plane parameters that are NOT request fields.
    # NOTE: fixed — these are variable-length tuples, so the annotation must
    # be Tuple[str, ...]; the original Tuple[str] declared a 1-tuple.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Maps each client method name to its ordered request-field names.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'apply_conversion_workspace': ('name', 'filter', 'dry_run', 'auto_commit', 'connection_profile', ),
        'commit_conversion_workspace': ('name', 'commit_name', ),
        'convert_conversion_workspace': ('name', 'auto_commit', 'filter', 'convert_full_path', ),
        'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ),
        'create_conversion_workspace': ('parent', 'conversion_workspace_id', 'conversion_workspace', 'request_id', ),
        'create_mapping_rule': ('parent', 'mapping_rule_id', 'mapping_rule', 'request_id', ),
        'create_migration_job': ('parent', 'migration_job_id', 'migration_job', 'request_id', ),
        'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', 'skip_validation', ),
        'delete_connection_profile': ('name', 'request_id', 'force', ),
        'delete_conversion_workspace': ('name', 'request_id', 'force', ),
        'delete_mapping_rule': ('name', 'request_id', ),
        'delete_migration_job': ('name', 'request_id', 'force', ),
        'delete_private_connection': ('name', 'request_id', ),
        'describe_conversion_workspace_revisions': ('conversion_workspace', 'commit_id', ),
        'describe_database_entities': ('conversion_workspace', 'tree', 'page_size', 'page_token', 'uncommitted', 'commit_id', 'filter', 'view', ),
        'fetch_static_ips': ('name', 'page_size', 'page_token', ),
        'generate_ssh_script': ('vm', 'migration_job', 'vm_creation_config', 'vm_selection_config', 'vm_port', ),
        'generate_tcp_proxy_script': ('vm_name', 'vm_machine_type', 'vm_subnet', 'migration_job', 'vm_zone', ),
        'get_connection_profile': ('name', ),
        'get_conversion_workspace': ('name', ),
        'get_mapping_rule': ('name', ),
        'get_migration_job': ('name', ),
        'get_private_connection': ('name', ),
        'import_mapping_rules': ('parent', 'rules_format', 'rules_files', 'auto_commit', ),
        'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'list_conversion_workspaces': ('parent', 'page_size', 'page_token', 'filter', ),
        'list_mapping_rules': ('parent', 'page_size', 'page_token', ),
        'list_migration_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'promote_migration_job': ('name', ),
        'restart_migration_job': ('name', 'skip_validation', ),
        'resume_migration_job': ('name', ),
        'rollback_conversion_workspace': ('name', ),
        'search_background_jobs': ('conversion_workspace', 'return_most_recent_per_job_type', 'max_size', 'completed_until_time', ),
        'seed_conversion_workspace': ('name', 'auto_commit', 'source_connection_profile', 'destination_connection_profile', ),
        'start_migration_job': ('name', 'skip_validation', ),
        'stop_migration_job': ('name', ),
        'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ),
        'update_conversion_workspace': ('update_mask', 'conversion_workspace', 'request_id', ),
        'update_migration_job': ('update_mask', 'migration_job', 'request_id', ),
        'verify_migration_job': ('name', 'update_mask', 'migration_job', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite an API method call into the request-dict form.

        Calls that are not recognized API methods, or that already pass a
        ``request`` keyword, are returned unchanged.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional overflow beyond the request fields can only be the
        # control params passed positionally; rebind them by keyword.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=clouddmsCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the clouddms client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Packaging script for the google-cloud-dms client library.
import io
import os
import re

import setuptools # type: ignore

package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-dms'


description = "Google Cloud Dms API client library"

version = None

# Extract the single quoted version string from gapic_version.py.
with open(os.path.join(package_root, 'google/cloud/clouddms/gapic_version.py')) as fp:
    # Dots are escaped so each separator matches a literal "."; the previous
    # pattern's bare "." matched any character (e.g. "1a2b3" would pass).
    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
    assert (len(version_candidates) == 1)
    version = version_candidates[0]

# Pre-1.0 releases are labeled Beta; everything else Production/Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    # Exclude incompatible versions of `google-auth`
    # See https://github.com/googleapis/google-cloud-python/issues/12364
    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
    "proto-plus >= 1.22.3, <2.0.0dev",
    "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'",
    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
    "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev",
]
extras = {
}
url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms"

package_root = os.path.abspath(os.path.dirname(__file__))

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the google.* namespace packages.
packages = [
    package
    for package in setuptools.find_namespace_packages()
    if package.startswith("google")
]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    install_requires=dependencies,
    extras_require=extras,
    include_package_data=True,
    zip_safe=False,
)
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..a81fb6bcd05c --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. 
+# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dms/v1/tests/__init__.py b/owl-bot-staging/google-cloud-dms/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dms/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dms/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/__init__.py b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py new file mode 100644 index 000000000000..25d98a36339d --- /dev/null +++ b/owl-bot-staging/google-cloud-dms/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -0,0 +1,18023 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceAsyncClient +from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceClient +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.services.data_migration_service import 
transports +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataMigrationServiceClient._get_default_mtls_endpoint(None) is None + assert DataMigrationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataMigrationServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataMigrationServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataMigrationServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": 
"Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataMigrationServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataMigrationServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataMigrationServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataMigrationServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataMigrationServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataMigrationServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataMigrationServiceClient._get_client_cert_source(None, False) is None + assert DataMigrationServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataMigrationServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DataMigrationServiceClient._get_client_cert_source(None, True) is 
mock_default_cert_source + assert DataMigrationServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataMigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataMigrationServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataMigrationServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataMigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataMigrationServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataMigrationServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataMigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataMigrationServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataMigrationServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataMigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS 
is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataMigrationServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataMigrationServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataMigrationServiceClient._get_universe_domain(None, None) == DataMigrationServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataMigrationServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataMigrationServiceClient, "grpc"), + (DataMigrationServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_migration_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datamigration.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataMigrationServiceGrpcTransport, "grpc"), + (transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_migration_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + 
use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataMigrationServiceClient, "grpc"), + (DataMigrationServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_migration_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datamigration.googleapis.com:443' + ) + + +def test_data_migration_service_client_get_transport_class(): + transport = DataMigrationServiceClient.get_transport_class() + available_transports = [ + transports.DataMigrationServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DataMigrationServiceClient.get_transport_class("grpc") + assert transport == transports.DataMigrationServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataMigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceAsyncClient)) +def test_data_migration_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", "true"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataMigrationServiceClient, 
transports.DataMigrationServiceGrpcTransport, "grpc", "false"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataMigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_migration_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataMigrationServiceClient, DataMigrationServiceAsyncClient +]) +@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) +def test_data_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataMigrationServiceClient, DataMigrationServiceAsyncClient +]) +@mock.patch.object(DataMigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataMigrationServiceAsyncClient)) +def test_data_migration_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataMigrationServiceClient._DEFAULT_UNIVERSE + default_endpoint = 
DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataMigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_migration_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_migration_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_migration_service_client_client_options_from_dict(): + with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataMigrationServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_migration_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListMigrationJobsRequest, + dict, +]) +def test_list_migration_jobs(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we 
are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ListMigrationJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_migration_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ListMigrationJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_migration_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListMigrationJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_migration_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_migration_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_migration_jobs] = mock_rpc + request = {} + client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_migration_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_migration_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_migration_jobs] = mock_rpc + + request = {} + await client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_migration_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_migration_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListMigrationJobsRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ListMigrationJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_from_dict(): + await test_list_migration_jobs_async(request_type=dict) + +def test_list_migration_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMigrationJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value = clouddms.ListMigrationJobsResponse() + client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_migration_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMigrationJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) + await client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_migration_jobs_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_migration_jobs_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_jobs( + clouddms.ListMigrationJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_migration_jobs_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_migration_jobs_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_migration_jobs( + clouddms.ListMigrationJobsRequest(), + parent='parent_value', + ) + + +def test_list_migration_jobs_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_migration_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.MigrationJob) + for i in results) +def test_list_migration_jobs_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clouddms_resources.MigrationJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_migration_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.GetMigrationJobRequest, + dict, +]) +def test_get_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms_resources.MigrationJob( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.MigrationJob.State.MAINTENANCE, + phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, + type_=clouddms_resources.MigrationJob.Type.ONE_TIME, + dump_path='dump_path_value', + source='source_value', + destination='destination_value', + filter='filter_value', + cmek_key_name='cmek_key_name_value', + ) + response = client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GetMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.MigrationJob) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE + assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP + assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME + assert response.dump_path == 'dump_path_value' + assert response.source == 'source_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.cmek_key_name == 'cmek_key_name_value' + + +def test_get_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.GetMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetMigrationJobRequest( + name='name_value', + ) + +def test_get_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_migration_job] = mock_rpc + request = {} + client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_migration_job] = mock_rpc + + request = {} + await client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.MigrationJob.State.MAINTENANCE, + phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, + type_=clouddms_resources.MigrationJob.Type.ONE_TIME, + dump_path='dump_path_value', + source='source_value', + destination='destination_value', + filter='filter_value', + cmek_key_name='cmek_key_name_value', + )) + response = await client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.GetMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.MigrationJob) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE + assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP + assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME + assert response.dump_path == 'dump_path_value' + assert response.source == 'source_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.cmek_key_name == 'cmek_key_name_value' + + +@pytest.mark.asyncio +async def test_get_migration_job_async_from_dict(): + await test_get_migration_job_async(request_type=dict) + +def test_get_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.GetMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value = clouddms_resources.MigrationJob() + client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) + await client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.MigrationJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_migration_job( + clouddms.GetMigrationJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.MigrationJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_migration_job( + clouddms.GetMigrationJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateMigrationJobRequest, + dict, +]) +def test_create_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CreateMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.CreateMigrationJobRequest( + parent='parent_value', + migration_job_id='migration_job_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateMigrationJobRequest( + parent='parent_value', + migration_job_id='migration_job_id_value', + request_id='request_id_value', + ) + +def test_create_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_migration_job] = mock_rpc + request = {} + client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_migration_job] = mock_rpc + + request = {} + await client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.CreateMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_migration_job_async_from_dict(): + await test_create_migration_job_async(request_type=dict) + +def test_create_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateMigrationJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateMigrationJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_migration_job( + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].migration_job_id + mock_val = 'migration_job_id_value' + assert arg == mock_val + + +def test_create_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_migration_job( + clouddms.CreateMigrationJobRequest(), + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + +@pytest.mark.asyncio +async def test_create_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_migration_job( + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].migration_job_id + mock_val = 'migration_job_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_migration_job( + clouddms.CreateMigrationJobRequest(), + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.UpdateMigrationJobRequest, + dict, +]) +def test_update_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.UpdateMigrationJobRequest( + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateMigrationJobRequest( + request_id='request_id_value', + ) + +def test_update_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_migration_job] = mock_rpc + request = {} + client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_migration_job] = mock_rpc + + request = {} + await client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_migration_job_async_from_dict(): + await test_update_migration_job_async(request_type=dict) + +def test_update_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateMigrationJobRequest() + + request.migration_job.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateMigrationJobRequest() + + request.migration_job.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job.name=name_value', + ) in kw['metadata'] + + +def test_update_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_migration_job( + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_migration_job( + clouddms.UpdateMigrationJobRequest(), + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_migration_job( + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_migration_job( + clouddms.UpdateMigrationJobRequest(), + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteMigrationJobRequest, + dict, +]) +def test_delete_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.DeleteMigrationJobRequest( + name='name_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteMigrationJobRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_migration_job] = mock_rpc + request = {} + client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_migration_job] = mock_rpc + + request = {} + await client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_migration_job_async_from_dict(): + await test_delete_migration_job_async(request_type=dict) + +def test_delete_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_migration_job( + clouddms.DeleteMigrationJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_migration_job( + clouddms.DeleteMigrationJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.StartMigrationJobRequest, + dict, +]) +def test_start_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.StartMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.StartMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.start_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StartMigrationJobRequest( + name='name_value', + ) + +def test_start_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.start_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.start_migration_job] = mock_rpc + request = {} + client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_start_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.start_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.start_migration_job] = mock_rpc + + request = {} + await client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.start_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_start_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StartMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.StartMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_migration_job_async_from_dict(): + await test_start_migration_job_async(request_type=dict) + +def test_start_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_start_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.StopMigrationJobRequest, + dict, +]) +def test_stop_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.StopMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.StopMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.stop_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StopMigrationJobRequest( + name='name_value', + ) + +def test_stop_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.stop_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.stop_migration_job] = mock_rpc + request = {} + client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_stop_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.stop_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.stop_migration_job] = mock_rpc + + request = {} + await client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.stop_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_stop_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StopMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.StopMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_migration_job_async_from_dict(): + await test_stop_migration_job_async(request_type=dict) + +def test_stop_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StopMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_stop_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StopMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ResumeMigrationJobRequest, + dict, +]) +def test_resume_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ResumeMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_resume_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ResumeMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.resume_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ResumeMigrationJobRequest( + name='name_value', + ) + +def test_resume_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resume_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.resume_migration_job] = mock_rpc + request = {} + client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resume_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_resume_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.resume_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.resume_migration_job] = mock_rpc + + request = {} + await client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.resume_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_resume_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.ResumeMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ResumeMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_resume_migration_job_async_from_dict(): + await test_resume_migration_job_async(request_type=dict) + +def test_resume_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ResumeMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_resume_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ResumeMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.PromoteMigrationJobRequest, + dict, +]) +def test_promote_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.PromoteMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_promote_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.PromoteMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.promote_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.PromoteMigrationJobRequest( + name='name_value', + ) + +def test_promote_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.promote_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.promote_migration_job] = mock_rpc + request = {} + client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.promote_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_promote_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.promote_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.promote_migration_job] = mock_rpc + + request = {} + await client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.promote_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_promote_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.PromoteMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.PromoteMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_promote_migration_job_async_from_dict(): + await test_promote_migration_job_async(request_type=dict) + +def test_promote_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.PromoteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_promote_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.PromoteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.VerifyMigrationJobRequest, + dict, +]) +def test_verify_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.VerifyMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_verify_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.VerifyMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.verify_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.VerifyMigrationJobRequest( + name='name_value', + ) + +def test_verify_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.verify_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.verify_migration_job] = mock_rpc + request = {} + client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.verify_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_verify_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.verify_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.verify_migration_job] = mock_rpc + + request = {} + await client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.verify_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_verify_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.VerifyMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.VerifyMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_verify_migration_job_async_from_dict(): + await test_verify_migration_job_async(request_type=dict) + +def test_verify_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.VerifyMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_verify_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.VerifyMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.RestartMigrationJobRequest, + dict, +]) +def test_restart_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.RestartMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restart_migration_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.RestartMigrationJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.restart_migration_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RestartMigrationJobRequest( + name='name_value', + ) + +def test_restart_migration_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restart_migration_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.restart_migration_job] = mock_rpc + request = {} + client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restart_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restart_migration_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.restart_migration_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.restart_migration_job] = mock_rpc + + request = {} + await client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restart_migration_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restart_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.RestartMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.RestartMigrationJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restart_migration_job_async_from_dict(): + await test_restart_migration_job_async(request_type=dict) + +def test_restart_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RestartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restart_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RestartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.GenerateSshScriptRequest, + dict, +]) +def test_generate_ssh_script(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.SshScript( + script='script_value', + ) + response = client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GenerateSshScriptRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SshScript) + assert response.script == 'script_value' + + +def test_generate_ssh_script_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GenerateSshScriptRequest( + migration_job='migration_job_value', + vm='vm_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.generate_ssh_script(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GenerateSshScriptRequest( + migration_job='migration_job_value', + vm='vm_value', + ) + +def test_generate_ssh_script_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_ssh_script in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_ssh_script] = mock_rpc + request = {} + client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_ssh_script(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_ssh_script_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_ssh_script in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_ssh_script] = mock_rpc + + request = {} + await client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_ssh_script(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_ssh_script_async(transport: str = 'grpc_asyncio', request_type=clouddms.GenerateSshScriptRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.generate_ssh_script),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript(
+            script='script_value',
+        ))
+        response = await client.generate_ssh_script(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = clouddms.GenerateSshScriptRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, clouddms.SshScript)
+    assert response.script == 'script_value'
+
+
+@pytest.mark.asyncio
+async def test_generate_ssh_script_async_from_dict():
+    await test_generate_ssh_script_async(request_type=dict)
+
+def test_generate_ssh_script_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.GenerateSshScriptRequest()
+
+    request.migration_job = 'migration_job_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_ssh_script),
+            '__call__') as call:
+        call.return_value = clouddms.SshScript()
+        client.generate_ssh_script(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_ssh_script_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GenerateSshScriptRequest() + + request.migration_job = 'migration_job_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript()) + await client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.GenerateTcpProxyScriptRequest, + dict, +]) +def test_generate_tcp_proxy_script(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_tcp_proxy_script), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.TcpProxyScript( + script='script_value', + ) + response = client.generate_tcp_proxy_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GenerateTcpProxyScriptRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.TcpProxyScript) + assert response.script == 'script_value' + + +def test_generate_tcp_proxy_script_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GenerateTcpProxyScriptRequest( + migration_job='migration_job_value', + vm_name='vm_name_value', + vm_machine_type='vm_machine_type_value', + vm_zone='vm_zone_value', + vm_subnet='vm_subnet_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_tcp_proxy_script), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.generate_tcp_proxy_script(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GenerateTcpProxyScriptRequest( + migration_job='migration_job_value', + vm_name='vm_name_value', + vm_machine_type='vm_machine_type_value', + vm_zone='vm_zone_value', + vm_subnet='vm_subnet_value', + ) + +def test_generate_tcp_proxy_script_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_tcp_proxy_script in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_tcp_proxy_script] = mock_rpc + request = {} + client.generate_tcp_proxy_script(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_tcp_proxy_script(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_tcp_proxy_script_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_tcp_proxy_script in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_tcp_proxy_script] = mock_rpc + + request = {} + await client.generate_tcp_proxy_script(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_tcp_proxy_script(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_tcp_proxy_script_async(transport: str = 'grpc_asyncio', request_type=clouddms.GenerateTcpProxyScriptRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_tcp_proxy_script),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.TcpProxyScript(
+            script='script_value',
+        ))
+        response = await client.generate_tcp_proxy_script(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = clouddms.GenerateTcpProxyScriptRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, clouddms.TcpProxyScript)
+    assert response.script == 'script_value'
+
+
+@pytest.mark.asyncio
+async def test_generate_tcp_proxy_script_async_from_dict():
+    await test_generate_tcp_proxy_script_async(request_type=dict)
+
+def test_generate_tcp_proxy_script_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.GenerateTcpProxyScriptRequest()
+
+    request.migration_job = 'migration_job_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_tcp_proxy_script),
+            '__call__') as call:
+        call.return_value = clouddms.TcpProxyScript()
+        client.generate_tcp_proxy_script(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_tcp_proxy_script_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GenerateTcpProxyScriptRequest() + + request.migration_job = 'migration_job_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_tcp_proxy_script), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.TcpProxyScript()) + await client.generate_tcp_proxy_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListConnectionProfilesRequest, + dict, +]) +def test_list_connection_profiles(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.ListConnectionProfilesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ListConnectionProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionProfilesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_connection_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ListConnectionProfilesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_connection_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConnectionProfilesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_connection_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_connection_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_connection_profiles] = mock_rpc + request = {} + client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_connection_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_connection_profiles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_connection_profiles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_connection_profiles] = mock_rpc + + request = {} + await client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_connection_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_connection_profiles_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConnectionProfilesRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ListConnectionProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_connection_profiles_async_from_dict(): + await test_list_connection_profiles_async(request_type=dict) + +def test_list_connection_profiles_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConnectionProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value = clouddms.ListConnectionProfilesResponse() + client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_connection_profiles_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConnectionProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) + await client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_connection_profiles_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConnectionProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connection_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_connection_profiles_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connection_profiles( + clouddms.ListConnectionProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_connection_profiles_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConnectionProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connection_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_connection_profiles_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_connection_profiles( + clouddms.ListConnectionProfilesRequest(), + parent='parent_value', + ) + + +def test_list_connection_profiles_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + next_page_token='abc', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token='def', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + ], + next_page_token='ghi', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_connection_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.ConnectionProfile) + for i in results) +def test_list_connection_profiles_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the 
gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + next_page_token='abc', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token='def', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + ], + next_page_token='ghi', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connection_profiles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_connection_profiles_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + next_page_token='abc', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token='def', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + ], + next_page_token='ghi', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connection_profiles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clouddms_resources.ConnectionProfile) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_connection_profiles_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + next_page_token='abc', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token='def', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + ], + next_page_token='ghi', + ), + clouddms.ListConnectionProfilesResponse( + connection_profiles=[ + clouddms_resources.ConnectionProfile(), + clouddms_resources.ConnectionProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connection_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.GetConnectionProfileRequest, + dict, +]) +def test_get_connection_profile(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms_resources.ConnectionProfile( + name='name_value', + state=clouddms_resources.ConnectionProfile.State.DRAFT, + display_name='display_name_value', + provider=clouddms_resources.DatabaseProvider.CLOUDSQL, + ) + response = client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GetConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.ConnectionProfile) + assert response.name == 'name_value' + assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT + assert response.display_name == 'display_name_value' + assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL + + +def test_get_connection_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GetConnectionProfileRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_connection_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConnectionProfileRequest( + name='name_value', + ) + +def test_get_connection_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_connection_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_connection_profile] = mock_rpc + request = {} + client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_connection_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_connection_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_connection_profile] = mock_rpc + + request = {} + await client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConnectionProfileRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile( + name='name_value', + state=clouddms_resources.ConnectionProfile.State.DRAFT, + display_name='display_name_value', + provider=clouddms_resources.DatabaseProvider.CLOUDSQL, + )) + response = await client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.GetConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.ConnectionProfile) + assert response.name == 'name_value' + assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT + assert response.display_name == 'display_name_value' + assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL + + +@pytest.mark.asyncio +async def test_get_connection_profile_async_from_dict(): + await test_get_connection_profile_async(request_type=dict) + +def test_get_connection_profile_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConnectionProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + call.return_value = clouddms_resources.ConnectionProfile() + client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_connection_profile_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConnectionProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile()) + await client.get_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_connection_profile_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.ConnectionProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_connection_profile_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection_profile( + clouddms.GetConnectionProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_connection_profile_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.ConnectionProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_connection_profile_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_connection_profile( + clouddms.GetConnectionProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateConnectionProfileRequest, + dict, +]) +def test_create_connection_profile(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CreateConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_connection_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.CreateConnectionProfileRequest( + parent='parent_value', + connection_profile_id='connection_profile_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_connection_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConnectionProfileRequest( + parent='parent_value', + connection_profile_id='connection_profile_id_value', + request_id='request_id_value', + ) + +def test_create_connection_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_connection_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_connection_profile] = mock_rpc + request = {} + client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_connection_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_connection_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_connection_profile] = mock_rpc + + request = {} + await client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConnectionProfileRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.CreateConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_connection_profile_async_from_dict(): + await test_create_connection_profile_async(request_type=dict) + +def test_create_connection_profile_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CreateConnectionProfileRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_connection_profile_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConnectionProfileRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_connection_profile_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection_profile( + parent='parent_value', + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + connection_profile_id='connection_profile_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection_profile + mock_val = clouddms_resources.ConnectionProfile(name='name_value') + assert arg == mock_val + arg = args[0].connection_profile_id + mock_val = 'connection_profile_id_value' + assert arg == mock_val + + +def test_create_connection_profile_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_connection_profile( + clouddms.CreateConnectionProfileRequest(), + parent='parent_value', + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + connection_profile_id='connection_profile_id_value', + ) + +@pytest.mark.asyncio +async def test_create_connection_profile_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_connection_profile( + parent='parent_value', + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + connection_profile_id='connection_profile_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection_profile + mock_val = clouddms_resources.ConnectionProfile(name='name_value') + assert arg == mock_val + arg = args[0].connection_profile_id + mock_val = 'connection_profile_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_connection_profile_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_connection_profile( + clouddms.CreateConnectionProfileRequest(), + parent='parent_value', + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + connection_profile_id='connection_profile_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.UpdateConnectionProfileRequest, + dict, +]) +def test_update_connection_profile(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_connection_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.UpdateConnectionProfileRequest( + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_connection_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConnectionProfileRequest( + request_id='request_id_value', + ) + +def test_update_connection_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_connection_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_connection_profile] = mock_rpc + request = {} + client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_connection_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_connection_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_connection_profile] = mock_rpc + + request = {} + await client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConnectionProfileRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_connection_profile_async_from_dict(): + await test_update_connection_profile_async(request_type=dict) + +def test_update_connection_profile_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.UpdateConnectionProfileRequest() + + request.connection_profile.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection_profile.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_connection_profile_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConnectionProfileRequest() + + request.connection_profile.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection_profile.name=name_value', + ) in kw['metadata'] + + +def test_update_connection_profile_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection_profile( + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection_profile + mock_val = clouddms_resources.ConnectionProfile(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_connection_profile_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection_profile( + clouddms.UpdateConnectionProfileRequest(), + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_connection_profile_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection_profile( + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection_profile + mock_val = clouddms_resources.ConnectionProfile(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_connection_profile_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_connection_profile( + clouddms.UpdateConnectionProfileRequest(), + connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteConnectionProfileRequest, + dict, +]) +def test_delete_connection_profile(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_connection_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.DeleteConnectionProfileRequest( + name='name_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_connection_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConnectionProfileRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_connection_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_connection_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_connection_profile] = mock_rpc + request = {} + client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_connection_profile_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_connection_profile in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_connection_profile] = mock_rpc + + request = {} + await client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_connection_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConnectionProfileRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteConnectionProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_connection_profile_async_from_dict(): + await test_delete_connection_profile_async(request_type=dict) + +def test_delete_connection_profile_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DeleteConnectionProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_connection_profile_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConnectionProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_connection_profile_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_connection_profile_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection_profile( + clouddms.DeleteConnectionProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_connection_profile_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_connection_profile_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection_profile( + clouddms.DeleteConnectionProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreatePrivateConnectionRequest, + dict, +]) +def test_create_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CreatePrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_private_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.CreatePrivateConnectionRequest( + parent='parent_value', + private_connection_id='private_connection_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_private_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest( + parent='parent_value', + private_connection_id='private_connection_id_value', + request_id='request_id_value', + ) + +def test_create_private_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_private_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.create_private_connection] = mock_rpc + request = {} + client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_private_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_private_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_private_connection] = mock_rpc + + request = {} + await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreatePrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.CreatePrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_private_connection_async_from_dict(): + await test_create_private_connection_async(request_type=dict) + +def test_create_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CreatePrivateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_private_connection( + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name='name_value') + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = 'private_connection_id_value' + assert arg == mock_val + + +def test_create_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_private_connection( + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name='name_value') + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = 'private_connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.GetPrivateConnectionRequest, + dict, +]) +def test_get_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.PrivateConnection.State.CREATING, + ) + response = client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GetPrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +def test_get_private_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GetPrivateConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_private_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest( + name='name_value', + ) + +def test_get_private_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_private_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_private_connection] = mock_rpc + request = {} + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_private_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_private_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_private_connection] = mock_rpc + + request = {} + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetPrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.PrivateConnection.State.CREATING, + )) + response = await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.GetPrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +@pytest.mark.asyncio +async def test_get_private_connection_async_from_dict(): + await test_get_private_connection_async(request_type=dict) + +def test_get_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value = clouddms_resources.PrivateConnection() + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListPrivateConnectionsRequest, + dict, +]) +def test_list_private_connections(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ListPrivateConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_private_connections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ListPrivateConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_private_connections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_private_connections_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_private_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.list_private_connections] = mock_rpc + request = {} + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_private_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_private_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_private_connections in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_private_connections] = mock_rpc + + request = {} + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_private_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_private_connections_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListPrivateConnectionsRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ListPrivateConnectionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_private_connections_async_from_dict(): + await test_list_private_connections_async(request_type=dict) + +def test_list_private_connections_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value = clouddms.ListPrivateConnectionsResponse() + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_private_connections_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_private_connections_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_private_connections_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_private_connections_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_private_connections(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert 
pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) + for i in results) +def test_list_private_connections_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_private_connections_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_private_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.DeletePrivateConnectionRequest, + dict, +]) +def test_delete_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DeletePrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_private_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.DeletePrivateConnectionRequest( + name='name_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_private_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_private_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_private_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_private_connection] = mock_rpc + request = {} + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_private_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_private_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_private_connection] = mock_rpc + + request = {} + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_private_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeletePrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DeletePrivateConnectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_private_connection_async_from_dict(): + await test_delete_private_connection_async(request_type=dict) + +def test_delete_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DeletePrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.GetConversionWorkspaceRequest, + dict, +]) +def test_get_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace( + name='name_value', + has_uncommitted_changes=True, + latest_commit_id='latest_commit_id_value', + display_name='display_name_value', + ) + response = client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GetConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == 'name_value' + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == 'latest_commit_id_value' + assert response.display_name == 'display_name_value' + + +def test_get_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GetConversionWorkspaceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest( + name='name_value', + ) + +def test_get_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_conversion_workspace] = mock_rpc + request = {} + client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_conversion_workspace] = mock_rpc + + request = {} + await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace( + name='name_value', + has_uncommitted_changes=True, + latest_commit_id='latest_commit_id_value', + display_name='display_name_value', + )) + response = await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.GetConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == 'name_value' + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == 'latest_commit_id_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async_from_dict(): + await test_get_conversion_workspace_async(request_type=dict) + +def test_get_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value = conversionworkspace_resources.ConversionWorkspace() + client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) + await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListConversionWorkspacesRequest, + dict, +]) +def test_list_conversion_workspaces(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ListConversionWorkspacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_conversion_workspaces_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ListConversionWorkspacesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_conversion_workspaces(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_conversion_workspaces_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_conversion_workspaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_conversion_workspaces] = mock_rpc + request = {} + client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_conversion_workspaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_conversion_workspaces in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_conversion_workspaces] = mock_rpc + + request = {} + await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_conversion_workspaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConversionWorkspacesRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ListConversionWorkspacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_from_dict(): + await test_list_conversion_workspaces_async(request_type=dict) + +def test_list_conversion_workspaces_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value = clouddms.ListConversionWorkspacesResponse() + client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) + await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_conversion_workspaces_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_conversion_workspaces( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_conversion_workspaces_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_conversion_workspaces( + clouddms.ListConversionWorkspacesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_conversion_workspaces( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_conversion_workspaces( + clouddms.ListConversionWorkspacesRequest(), + parent='parent_value', + ) + + +def test_list_conversion_workspaces_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_conversion_workspaces(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) + for i in results) +def test_list_conversion_workspaces_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + pages = list(client.list_conversion_workspaces(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_conversion_workspaces(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_conversion_workspaces(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateConversionWorkspaceRequest, + dict, +]) +def test_create_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CreateConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.CreateConversionWorkspaceRequest( + parent='parent_value', + conversion_workspace_id='conversion_workspace_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest( + parent='parent_value', + conversion_workspace_id='conversion_workspace_id_value', + request_id='request_id_value', + ) + +def test_create_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_conversion_workspace] = mock_rpc + request = {} + client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_conversion_workspace] = mock_rpc + + request = {} + await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.CreateConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async_from_dict(): + await test_create_conversion_workspace_async(request_type=dict) + +def test_create_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_conversion_workspace( + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = 'conversion_workspace_id_value' + assert arg == mock_val + + +def test_create_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_conversion_workspace( + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = 'conversion_workspace_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.UpdateConversionWorkspaceRequest, + dict, +]) +def test_update_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.UpdateConversionWorkspaceRequest( + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest( + request_id='request_id_value', + ) + +def test_update_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_conversion_workspace] = mock_rpc + request = {} + client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_conversion_workspace] = mock_rpc + + request = {} + await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.UpdateConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async_from_dict(): + await test_update_conversion_workspace_async(request_type=dict) + +def test_update_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace.name=name_value', + ) in kw['metadata'] + + +def test_update_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteConversionWorkspaceRequest, + dict, +]) +def test_delete_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.DeleteConversionWorkspaceRequest( + name='name_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_conversion_workspace] = mock_rpc + request = {} + client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_conversion_workspace] = mock_rpc + + request = {} + await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async_from_dict(): + await test_delete_conversion_workspace_async(request_type=dict) + +def test_delete_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateMappingRuleRequest, + dict, +]) +def test_create_mapping_rule(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.MappingRule( + name='name_value', + display_name='display_name_value', + state=conversionworkspace_resources.MappingRule.State.ENABLED, + rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA, + rule_order=1075, + revision_id='revision_id_value', + ) + response = client.create_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CreateMappingRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, conversionworkspace_resources.MappingRule) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == conversionworkspace_resources.MappingRule.State.ENABLED + assert response.rule_scope == conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA + assert response.rule_order == 1075 + assert response.revision_id == 'revision_id_value' + + +def test_create_mapping_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.CreateMappingRuleRequest( + parent='parent_value', + mapping_rule_id='mapping_rule_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_mapping_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateMappingRuleRequest( + parent='parent_value', + mapping_rule_id='mapping_rule_id_value', + request_id='request_id_value', + ) + +def test_create_mapping_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_mapping_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_mapping_rule] = mock_rpc + request = {} + client.create_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_mapping_rule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_mapping_rule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_mapping_rule] = mock_rpc + + request = {} + await client.create_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_mapping_rule_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateMappingRuleRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_mapping_rule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule(
+            name='name_value',
+            display_name='display_name_value',
+            state=conversionworkspace_resources.MappingRule.State.ENABLED,
+            rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA,
+            rule_order=1075,
+            revision_id='revision_id_value',
+        ))
+        response = await client.create_mapping_rule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = clouddms.CreateMappingRuleRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, conversionworkspace_resources.MappingRule)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.state == conversionworkspace_resources.MappingRule.State.ENABLED
+    assert response.rule_scope == conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA
+    assert response.rule_order == 1075
+    assert response.revision_id == 'revision_id_value'
+
+
+@pytest.mark.asyncio
+async def test_create_mapping_rule_async_from_dict():
+    await test_create_mapping_rule_async(request_type=dict)
+
+def test_create_mapping_rule_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.CreateMappingRuleRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + call.return_value = conversionworkspace_resources.MappingRule() + client.create_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_mapping_rule_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateMappingRuleRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule()) + await client.create_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_mapping_rule_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = conversionworkspace_resources.MappingRule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_mapping_rule( + parent='parent_value', + mapping_rule=conversionworkspace_resources.MappingRule(name='name_value'), + mapping_rule_id='mapping_rule_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].mapping_rule + mock_val = conversionworkspace_resources.MappingRule(name='name_value') + assert arg == mock_val + arg = args[0].mapping_rule_id + mock_val = 'mapping_rule_id_value' + assert arg == mock_val + + +def test_create_mapping_rule_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_mapping_rule( + clouddms.CreateMappingRuleRequest(), + parent='parent_value', + mapping_rule=conversionworkspace_resources.MappingRule(name='name_value'), + mapping_rule_id='mapping_rule_id_value', + ) + +@pytest.mark.asyncio +async def test_create_mapping_rule_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.MappingRule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_mapping_rule( + parent='parent_value', + mapping_rule=conversionworkspace_resources.MappingRule(name='name_value'), + mapping_rule_id='mapping_rule_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].mapping_rule + mock_val = conversionworkspace_resources.MappingRule(name='name_value') + assert arg == mock_val + arg = args[0].mapping_rule_id + mock_val = 'mapping_rule_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_mapping_rule_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_mapping_rule( + clouddms.CreateMappingRuleRequest(), + parent='parent_value', + mapping_rule=conversionworkspace_resources.MappingRule(name='name_value'), + mapping_rule_id='mapping_rule_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteMappingRuleRequest, + dict, +]) +def test_delete_mapping_rule(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteMappingRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_mapping_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.DeleteMappingRuleRequest( + name='name_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_mapping_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteMappingRuleRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_mapping_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_mapping_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_mapping_rule] = mock_rpc + request = {} + client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_mapping_rule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_mapping_rule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_mapping_rule] = mock_rpc + + request = {} + await client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_mapping_rule_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteMappingRuleRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DeleteMappingRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_mapping_rule_async_from_dict(): + await test_delete_mapping_rule_async(request_type=dict) + +def test_delete_mapping_rule_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteMappingRuleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + call.return_value = None + client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_mapping_rule_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DeleteMappingRuleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_mapping_rule_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_mapping_rule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_mapping_rule_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_mapping_rule( + clouddms.DeleteMappingRuleRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_mapping_rule_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_mapping_rule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_mapping_rule_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_mapping_rule( + clouddms.DeleteMappingRuleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListMappingRulesRequest, + dict, +]) +def test_list_mapping_rules(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMappingRulesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ListMappingRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMappingRulesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_mapping_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ListMappingRulesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_mapping_rules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListMappingRulesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_mapping_rules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_mapping_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_mapping_rules] = mock_rpc + request = {} + client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_mapping_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_mapping_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_mapping_rules in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_mapping_rules] = mock_rpc + + request = {} + await client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_mapping_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_mapping_rules_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListMappingRulesRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMappingRulesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ListMappingRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMappingRulesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_mapping_rules_async_from_dict(): + await test_list_mapping_rules_async(request_type=dict) + +def test_list_mapping_rules_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + call.return_value = clouddms.ListMappingRulesResponse() + client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_mapping_rules_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMappingRulesResponse()) + await client.list_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_mapping_rules_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMappingRulesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_mapping_rules( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_mapping_rules_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mapping_rules( + clouddms.ListMappingRulesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_mapping_rules_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMappingRulesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMappingRulesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_mapping_rules( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_mapping_rules_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_mapping_rules( + clouddms.ListMappingRulesRequest(), + parent='parent_value', + ) + + +def test_list_mapping_rules_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + next_page_token='abc', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[], + next_page_token='def', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + ], + next_page_token='ghi', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_mapping_rules(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversionworkspace_resources.MappingRule) + for i in results) +def test_list_mapping_rules_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + next_page_token='abc', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[], + next_page_token='def', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + ], + next_page_token='ghi', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_mapping_rules(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_mapping_rules_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + next_page_token='abc', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[], + next_page_token='def', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + ], + next_page_token='ghi', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_mapping_rules(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, conversionworkspace_resources.MappingRule) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_mapping_rules_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + next_page_token='abc', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[], + next_page_token='def', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + ], + next_page_token='ghi', + ), + clouddms.ListMappingRulesResponse( + mapping_rules=[ + conversionworkspace_resources.MappingRule(), + conversionworkspace_resources.MappingRule(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_mapping_rules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.GetMappingRuleRequest, + dict, +]) +def test_get_mapping_rule(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = conversionworkspace_resources.MappingRule( + name='name_value', + display_name='display_name_value', + state=conversionworkspace_resources.MappingRule.State.ENABLED, + rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA, + rule_order=1075, + revision_id='revision_id_value', + ) + response = client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.GetMappingRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.MappingRule) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == conversionworkspace_resources.MappingRule.State.ENABLED + assert response.rule_scope == conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA + assert response.rule_order == 1075 + assert response.revision_id == 'revision_id_value' + + +def test_get_mapping_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.GetMappingRuleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_mapping_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetMappingRuleRequest( + name='name_value', + ) + +def test_get_mapping_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_mapping_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_mapping_rule] = mock_rpc + request = {} + client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_mapping_rule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_mapping_rule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_mapping_rule] = mock_rpc + + request = {} + await client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_mapping_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_mapping_rule_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetMappingRuleRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule( + name='name_value', + display_name='display_name_value', + state=conversionworkspace_resources.MappingRule.State.ENABLED, + rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA, + rule_order=1075, + revision_id='revision_id_value', + )) + response = await client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.GetMappingRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.MappingRule) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == conversionworkspace_resources.MappingRule.State.ENABLED + assert response.rule_scope == conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA + assert response.rule_order == 1075 + assert response.revision_id == 'revision_id_value' + + +@pytest.mark.asyncio +async def test_get_mapping_rule_async_from_dict(): + await test_get_mapping_rule_async(request_type=dict) + +def test_get_mapping_rule_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetMappingRuleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + call.return_value = conversionworkspace_resources.MappingRule() + client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_mapping_rule_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetMappingRuleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule()) + await client.get_mapping_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_mapping_rule_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = conversionworkspace_resources.MappingRule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_mapping_rule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_mapping_rule_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mapping_rule( + clouddms.GetMappingRuleRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_mapping_rule_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.MappingRule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_mapping_rule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_mapping_rule_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_mapping_rule( + clouddms.GetMappingRuleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.SeedConversionWorkspaceRequest, + dict, +]) +def test_seed_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.SeedConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_seed_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.SeedConversionWorkspaceRequest( + name='name_value', + source_connection_profile='source_connection_profile_value', + destination_connection_profile='destination_connection_profile_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.seed_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest( + name='name_value', + source_connection_profile='source_connection_profile_value', + destination_connection_profile='destination_connection_profile_value', + ) + +def test_seed_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.seed_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.seed_conversion_workspace] = mock_rpc + request = {} + client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.seed_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.seed_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.seed_conversion_workspace] = mock_rpc + + request = {} + await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.seed_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.SeedConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.SeedConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async_from_dict(): + await test_seed_conversion_workspace_async(request_type=dict) + +def test_seed_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.SeedConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ImportMappingRulesRequest, + dict, +]) +def test_import_mapping_rules(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ImportMappingRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_mapping_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ImportMappingRulesRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.import_mapping_rules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest( + parent='parent_value', + ) + +def test_import_mapping_rules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_mapping_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.import_mapping_rules] = mock_rpc + request = {} + client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_mapping_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_import_mapping_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.import_mapping_rules in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.import_mapping_rules] = mock_rpc + + request = {} + await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_mapping_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_import_mapping_rules_async(transport: str = 'grpc_asyncio', request_type=clouddms.ImportMappingRulesRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ImportMappingRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async_from_dict(): + await test_import_mapping_rules_async(request_type=dict) + +def test_import_mapping_rules_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_mapping_rules_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ConvertConversionWorkspaceRequest, + dict, +]) +def test_convert_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ConvertConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_convert_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.ConvertConversionWorkspaceRequest( + name='name_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.convert_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest( + name='name_value', + filter='filter_value', + ) + +def test_convert_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.convert_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.convert_conversion_workspace] = mock_rpc + request = {} + client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.convert_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.convert_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.convert_conversion_workspace] = mock_rpc + + request = {} + await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.convert_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ConvertConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ConvertConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async_from_dict(): + await test_convert_conversion_workspace_async(request_type=dict) + +def test_convert_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.CommitConversionWorkspaceRequest, + dict, +]) +def test_commit_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.CommitConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_commit_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.CommitConversionWorkspaceRequest( + name='name_value', + commit_name='commit_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.commit_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest( + name='name_value', + commit_name='commit_name_value', + ) + +def test_commit_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.commit_conversion_workspace] = mock_rpc + request = {} + client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.commit_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.commit_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.commit_conversion_workspace] = mock_rpc + + request = {} + await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.commit_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CommitConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.CommitConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async_from_dict(): + await test_commit_conversion_workspace_async(request_type=dict) + +def test_commit_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CommitConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CommitConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.RollbackConversionWorkspaceRequest, + dict, +]) +def test_rollback_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.RollbackConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_rollback_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.RollbackConversionWorkspaceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.rollback_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest( + name='name_value', + ) + +def test_rollback_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.rollback_conversion_workspace] = mock_rpc + request = {} + client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.rollback_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.rollback_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.rollback_conversion_workspace] = mock_rpc + + request = {} + await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.rollback_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.RollbackConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.RollbackConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async_from_dict(): + await test_rollback_conversion_workspace_async(request_type=dict) + +def test_rollback_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ApplyConversionWorkspaceRequest, + dict, +]) +def test_apply_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.ApplyConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_conversion_workspace_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.ApplyConversionWorkspaceRequest( + name='name_value', + filter='filter_value', + connection_profile='connection_profile_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.apply_conversion_workspace(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest( + name='name_value', + filter='filter_value', + connection_profile='connection_profile_value', + ) + +def test_apply_conversion_workspace_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.apply_conversion_workspace in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.apply_conversion_workspace] = mock_rpc + request = {} + client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.apply_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.apply_conversion_workspace in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.apply_conversion_workspace] = mock_rpc + + request = {} + await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.apply_conversion_workspace(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ApplyConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.ApplyConversionWorkspaceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async_from_dict(): + await test_apply_conversion_workspace_async(request_type=dict) + +def test_apply_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.DescribeDatabaseEntitiesRequest, + dict, +]) +def test_describe_database_entities(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeDatabaseEntitiesResponse( + next_page_token='next_page_token_value', + ) + response = client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DescribeDatabaseEntitiesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_describe_database_entities_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.DescribeDatabaseEntitiesRequest( + conversion_workspace='conversion_workspace_value', + page_token='page_token_value', + commit_id='commit_id_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.describe_database_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest( + conversion_workspace='conversion_workspace_value', + page_token='page_token_value', + commit_id='commit_id_value', + filter='filter_value', + ) + +def test_describe_database_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.describe_database_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.describe_database_entities] = mock_rpc + request = {} + client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.describe_database_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_describe_database_entities_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.describe_database_entities in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.describe_database_entities] = mock_rpc + + request = {} + await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.describe_database_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_describe_database_entities_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeDatabaseEntitiesRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse( + next_page_token='next_page_token_value', + )) + response = await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DescribeDatabaseEntitiesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_from_dict(): + await test_describe_database_entities_async(request_type=dict) + +def test_describe_database_entities_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value = clouddms.DescribeDatabaseEntitiesResponse() + client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_describe_database_entities_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse()) + await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +def test_describe_database_entities_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('conversion_workspace', ''), + )), + ) + pager = client.describe_database_entities(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in results) +def test_describe_database_entities_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = list(client.describe_database_entities(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + async_pager = await client.describe_database_entities(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in responses) + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.describe_database_entities(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.SearchBackgroundJobsRequest, + dict, +]) +def test_search_background_jobs(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.SearchBackgroundJobsResponse( + ) + response = client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.SearchBackgroundJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +def test_search_background_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.SearchBackgroundJobsRequest( + conversion_workspace='conversion_workspace_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.search_background_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest( + conversion_workspace='conversion_workspace_value', + ) + +def test_search_background_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_background_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_background_jobs] = mock_rpc + request = {} + client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_background_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_background_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_background_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_background_jobs] = mock_rpc + + request = {} + await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_background_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_background_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.SearchBackgroundJobsRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse( + )) + response = await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.SearchBackgroundJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +@pytest.mark.asyncio +async def test_search_background_jobs_async_from_dict(): + await test_search_background_jobs_async(request_type=dict) + +def test_search_background_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value = clouddms.SearchBackgroundJobsResponse() + client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_search_background_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse()) + await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.DescribeConversionWorkspaceRevisionsRequest, + dict, +]) +def test_describe_conversion_workspace_revisions(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse( + ) + response = client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +def test_describe_conversion_workspace_revisions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace='conversion_workspace_value', + commit_id='commit_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.describe_conversion_workspace_revisions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace='conversion_workspace_value', + commit_id='commit_id_value', + ) + +def test_describe_conversion_workspace_revisions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.describe_conversion_workspace_revisions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.describe_conversion_workspace_revisions] = mock_rpc + request = {} + client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.describe_conversion_workspace_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.describe_conversion_workspace_revisions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.describe_conversion_workspace_revisions] = mock_rpc + + request = {} + await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.describe_conversion_workspace_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeConversionWorkspaceRevisionsRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse( + )) + response = await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async_from_dict(): + await test_describe_conversion_workspace_revisions_async(request_type=dict) + +def test_describe_conversion_workspace_revisions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse()) + await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.FetchStaticIpsRequest, + dict, +]) +def test_fetch_static_ips(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse( + static_ips=['static_ips_value'], + next_page_token='next_page_token_value', + ) + response = client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = clouddms.FetchStaticIpsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ['static_ips_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_static_ips_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = clouddms.FetchStaticIpsRequest( + name='name_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.fetch_static_ips(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest( + name='name_value', + page_token='page_token_value', + ) + +def test_fetch_static_ips_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.fetch_static_ips in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.fetch_static_ips] = mock_rpc + request = {} + client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_static_ips(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.fetch_static_ips in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.fetch_static_ips] = mock_rpc + + request = {} + await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.fetch_static_ips(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_fetch_static_ips_async(transport: str = 'grpc_asyncio', request_type=clouddms.FetchStaticIpsRequest): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse( + static_ips=['static_ips_value'], + next_page_token='next_page_token_value', + )) + response = await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = clouddms.FetchStaticIpsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchStaticIpsAsyncPager) + assert response.static_ips == ['static_ips_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_from_dict(): + await test_fetch_static_ips_async(request_type=dict) + +def test_fetch_static_ips_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value = clouddms.FetchStaticIpsResponse() + client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_static_ips_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) + await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_fetch_static_ips_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_static_ips( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_fetch_static_ips_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_static_ips( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name='name_value', + ) + + +def test_fetch_static_ips_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.fetch_static_ips(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_fetch_static_ips_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_static_ips(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_static_ips(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_static_ips(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataMigrationServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataMigrationServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_migration_jobs_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value = clouddms.ListMigrationJobsResponse() + client.list_migration_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListMigrationJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value = clouddms_resources.MigrationJob() + client.get_migration_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_start_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.start_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.StartMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_stop_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.stop_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.StopMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_resume_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.resume_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ResumeMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_promote_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.promote_migration_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.PromoteMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_verify_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.verify_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.VerifyMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restart_migration_job_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restart_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.RestartMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_generate_ssh_script_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + call.return_value = clouddms.SshScript() + client.generate_ssh_script(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GenerateSshScriptRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_tcp_proxy_script_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_tcp_proxy_script), + '__call__') as call: + call.return_value = clouddms.TcpProxyScript() + client.generate_tcp_proxy_script(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GenerateTcpProxyScriptRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_connection_profiles_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value = clouddms.ListConnectionProfilesResponse() + client.list_connection_profiles(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListConnectionProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_connection_profile_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + call.return_value = clouddms_resources.ConnectionProfile() + client.get_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_connection_profile_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_connection_profile_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_connection_profile_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_private_connection_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreatePrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_private_connection_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value = clouddms_resources.PrivateConnection() + client.get_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetPrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_private_connections_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value = clouddms.ListPrivateConnectionsResponse() + client.list_private_connections(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListPrivateConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_private_connection_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeletePrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value = conversionworkspace_resources.ConversionWorkspace() + client.get_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_conversion_workspaces_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value = clouddms.ListConversionWorkspacesResponse() + client.list_conversion_workspaces(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListConversionWorkspacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mapping_rule_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + call.return_value = conversionworkspace_resources.MappingRule() + client.create_mapping_rule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mapping_rule_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + call.return_value = None + client.delete_mapping_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mapping_rules_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + call.return_value = clouddms.ListMappingRulesResponse() + client.list_mapping_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListMappingRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mapping_rule_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + call.return_value = conversionworkspace_resources.MappingRule() + client.get_mapping_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_seed_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.seed_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.SeedConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_mapping_rules_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_mapping_rules(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ImportMappingRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_convert_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.convert_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ConvertConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.commit_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CommitConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_rollback_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.rollback_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.RollbackConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_apply_conversion_workspace_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.apply_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ApplyConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_describe_database_entities_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value = clouddms.DescribeDatabaseEntitiesResponse() + client.describe_database_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DescribeDatabaseEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_background_jobs_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value = clouddms.SearchBackgroundJobsResponse() + client.search_background_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.SearchBackgroundJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_describe_conversion_workspace_revisions_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + client.describe_conversion_workspace_revisions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_static_ips_empty_call_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value = clouddms.FetchStaticIpsResponse() + client.fetch_static_ips(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.FetchStaticIpsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataMigrationServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_migration_jobs_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_migration_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListMigrationJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.MigrationJob.State.MAINTENANCE, + phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, + type_=clouddms_resources.MigrationJob.Type.ONE_TIME, + dump_path='dump_path_value', + source='source_value', + destination='destination_value', + filter='filter_value', + cmek_key_name='cmek_key_name_value', + )) + await client.get_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_start_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.start_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.StartMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_stop_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.stop_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.StopMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_resume_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.resume_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ResumeMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_promote_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.promote_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.PromoteMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_verify_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.verify_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.VerifyMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_restart_migration_job_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.restart_migration_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.RestartMigrationJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_ssh_script_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript( + script='script_value', + )) + await client.generate_ssh_script(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GenerateSshScriptRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_generate_tcp_proxy_script_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_tcp_proxy_script), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.TcpProxyScript( + script='script_value', + )) + await client.generate_tcp_proxy_script(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GenerateTcpProxyScriptRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_connection_profiles_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_connection_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListConnectionProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_connection_profile_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile( + name='name_value', + state=clouddms_resources.ConnectionProfile.State.DRAFT, + display_name='display_name_value', + provider=clouddms_resources.DatabaseProvider.CLOUDSQL, + )) + await client.get_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_connection_profile_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_connection_profile_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_connection_profile_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_connection_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteConnectionProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_private_connection_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreatePrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_private_connection_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.PrivateConnection.State.CREATING, + )) + await client.get_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetPrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_private_connections_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_private_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListPrivateConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_private_connection_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_private_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeletePrivateConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace( + name='name_value', + has_uncommitted_changes=True, + latest_commit_id='latest_commit_id_value', + display_name='display_name_value', + )) + await client.get_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_conversion_workspaces_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_conversion_workspaces(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListConversionWorkspacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.UpdateConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_mapping_rule_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule( + name='name_value', + display_name='display_name_value', + state=conversionworkspace_resources.MappingRule.State.ENABLED, + rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA, + rule_order=1075, + revision_id='revision_id_value', + )) + await client.create_mapping_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CreateMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_mapping_rule_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_mapping_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DeleteMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_mapping_rules_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMappingRulesResponse( + next_page_token='next_page_token_value', + )) + await client.list_mapping_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ListMappingRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_mapping_rule_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mapping_rule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.MappingRule( + name='name_value', + display_name='display_name_value', + state=conversionworkspace_resources.MappingRule.State.ENABLED, + rule_scope=conversionworkspace_resources.DatabaseEntityType.DATABASE_ENTITY_TYPE_SCHEMA, + rule_order=1075, + revision_id='revision_id_value', + )) + await client.get_mapping_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.GetMappingRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_seed_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.seed_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.SeedConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_mapping_rules_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.import_mapping_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ImportMappingRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_convert_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.convert_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ConvertConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.commit_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.CommitConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_rollback_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.rollback_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.RollbackConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_apply_conversion_workspace_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.apply_conversion_workspace(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.ApplyConversionWorkspaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_describe_database_entities_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse( + next_page_token='next_page_token_value', + )) + await client.describe_database_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DescribeDatabaseEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_background_jobs_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse( + )) + await client.search_background_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.SearchBackgroundJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse( + )) + await client.describe_conversion_workspace_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_static_ips_empty_call_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse( + static_ips=['static_ips_value'], + next_page_token='next_page_token_value', + )) + await client.fetch_static_ips(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = clouddms.FetchStaticIpsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataMigrationServiceGrpcTransport, + ) + +def test_data_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_data_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_migration_jobs', + 'get_migration_job', + 'create_migration_job', + 'update_migration_job', + 'delete_migration_job', + 'start_migration_job', + 'stop_migration_job', + 'resume_migration_job', + 'promote_migration_job', + 'verify_migration_job', + 'restart_migration_job', + 'generate_ssh_script', + 'generate_tcp_proxy_script', + 'list_connection_profiles', + 'get_connection_profile', + 'create_connection_profile', + 'update_connection_profile', + 'delete_connection_profile', + 'create_private_connection', + 'get_private_connection', + 'list_private_connections', + 'delete_private_connection', + 'get_conversion_workspace', + 'list_conversion_workspaces', + 'create_conversion_workspace', + 'update_conversion_workspace', + 'delete_conversion_workspace', + 'create_mapping_rule', + 'delete_mapping_rule', + 'list_mapping_rules', + 'get_mapping_rule', + 'seed_conversion_workspace', + 'import_mapping_rules', + 'convert_conversion_workspace', + 'commit_conversion_workspace', + 'rollback_conversion_workspace', + 'apply_conversion_workspace', + 'describe_database_entities', + 'search_background_jobs', + 'describe_conversion_workspace_revisions', + 'fetch_static_ips', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_migration_service_base_transport_with_credentials_file(): + # 
Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_data_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport() + adc.assert_called_once() + + +def test_data_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataMigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataMigrationServiceGrpcTransport, grpc_helpers), + (transports.DataMigrationServiceGrpcAsyncIOTransport, 
grpc_helpers_async) + ], +) +def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) +def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_migration_service_host_no_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datamigration.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_migration_service_host_with_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datamigration.googleapis.com:8000' + ) + +def test_data_migration_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataMigrationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_migration_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) +def test_data_migration_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are 
+# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) +def test_data_migration_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_migration_service_grpc_lro_client(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_data_migration_service_grpc_lro_async_client(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_connection_profile_path(): + project = "squid" + location = "clam" + connection_profile = "whelk" + expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, ) + actual = DataMigrationServiceClient.connection_profile_path(project, location, connection_profile) + assert expected == actual + + +def test_parse_connection_profile_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection_profile": "nudibranch", + } + path = DataMigrationServiceClient.connection_profile_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_connection_profile_path(path) + assert expected == actual + +def test_conversion_workspace_path(): + project = "cuttlefish" + location = "mussel" + conversion_workspace = "winkle" + expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, ) + actual = DataMigrationServiceClient.conversion_workspace_path(project, location, conversion_workspace) + assert expected == actual + + +def test_parse_conversion_workspace_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "conversion_workspace": "abalone", + } + path = DataMigrationServiceClient.conversion_workspace_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_conversion_workspace_path(path) + assert expected == actual + +def test_mapping_rule_path(): + project = "squid" + location = "clam" + conversion_workspace = "whelk" + mapping_rule = "octopus" + expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}/mappingRules/{mapping_rule}".format(project=project, location=location, conversion_workspace=conversion_workspace, mapping_rule=mapping_rule, ) + actual = DataMigrationServiceClient.mapping_rule_path(project, location, conversion_workspace, mapping_rule) + assert expected == actual + + +def test_parse_mapping_rule_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "conversion_workspace": "cuttlefish", + "mapping_rule": "mussel", + } + path = DataMigrationServiceClient.mapping_rule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_mapping_rule_path(path) + assert expected == actual + +def test_migration_job_path(): + project = "winkle" + location = "nautilus" + migration_job = "scallop" + expected = "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, ) + actual = DataMigrationServiceClient.migration_job_path(project, location, migration_job) + assert expected == actual + + +def test_parse_migration_job_path(): + expected = { + "project": "abalone", + "location": "squid", + "migration_job": "clam", + } + path = DataMigrationServiceClient.migration_job_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_migration_job_path(path) + assert expected == actual + +def test_networks_path(): + project = "whelk" + network = "octopus" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = DataMigrationServiceClient.networks_path(project, network) + assert expected == actual + + +def test_parse_networks_path(): + expected = { + "project": "oyster", + "network": "nudibranch", + } + path = DataMigrationServiceClient.networks_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_networks_path(path) + assert expected == actual + +def test_private_connection_path(): + project = "cuttlefish" + location = "mussel" + private_connection = "winkle" + expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, ) + actual = DataMigrationServiceClient.private_connection_path(project, location, private_connection) + assert expected == actual + + +def test_parse_private_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "private_connection": "abalone", + } + path = DataMigrationServiceClient.private_connection_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_private_connection_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataMigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DataMigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataMigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DataMigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataMigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DataMigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = DataMigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DataMigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataMigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DataMigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataMigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.delete_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_cancel_operation(transport: str = "grpc"):
+    # CancelOperation (sync mixin): stub invoked exactly once, Empty -> None.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.CancelOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+@pytest.mark.asyncio
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.CancelOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+def test_cancel_operation_field_headers():
+    # CancelOperation: request.name must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = None
+
+        client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_cancel_operation_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.CancelOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        await client.cancel_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_cancel_operation_from_dict():
+    # CancelOperation also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_cancel_operation_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_get_operation(transport: str = "grpc"):
+    # GetOperation (sync mixin): stub invoked once, returns an Operation proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    # GetOperation (async mixin): same contract as the sync variant.
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    # GetOperation: request.name must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    # GetOperation also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_operations(transport: str = "grpc"):
+    # ListOperations (sync mixin): stub invoked once, returns a
+    # ListOperationsResponse proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+        response = client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+@pytest.mark.asyncio
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.ListOperationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_list_operations_field_headers():
+    # ListOperations: request.name must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_operations_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.ListOperationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        await client.list_operations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_operations_from_dict():
+    # ListOperations also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.ListOperationsResponse()
+
+        response = client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_list_locations(transport: str = "grpc"):
+    # ListLocations (sync mixin): stub invoked once, returns a
+    # ListLocationsResponse proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.ListLocationsResponse()
+        response = client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+@pytest.mark.asyncio
+async def test_list_locations_async(transport: str = "grpc_asyncio"):
+    # ListLocations (async mixin): same contract as the sync variant.
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        response = await client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+def test_list_locations_field_headers():
+    # ListLocations: request.name must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        call.return_value = locations_pb2.ListLocationsResponse()
+
+        client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_locations_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        await client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_locations_from_dict():
+    # ListLocations also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.ListLocationsResponse()
+
+        response = client.list_locations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_list_locations_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        response = await client.list_locations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_get_location(transport: str = "grpc"):
+    # GetLocation (sync mixin): stub invoked once, returns a Location proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+        response = client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+@pytest.mark.asyncio
+async def test_get_location_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    # GetLocation: request.name must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    # GetLocation also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # FIX: patch get_location (the RPC under test). The previous code patched
+    # list_locations, so client.get_location() reached the real, unmocked
+    # stub and call.assert_called() verified the wrong stub method.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # FIX: patch get_location, not list_locations (same defect as the sync
+    # variant in this test). The request name is also aligned with the other
+    # GetLocation tests ("locations/abc").
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    # SetIamPolicy (sync mixin): stub invoked once; version and etag from the
+    # mocked Policy round-trip to the caller.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+def test_set_iam_policy_field_headers():
+    # SetIamPolicy: request.resource must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+def test_set_iam_policy_from_dict():
+    # SetIamPolicy also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy()
+        )
+
+        response = await client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+def test_get_iam_policy(transport: str = "grpc"):
+    # GetIamPolicy (sync mixin): stub invoked once; version and etag from the
+    # mocked Policy round-trip to the caller.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_iam_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        )
+
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_field_headers():
+    # GetIamPolicy: request.resource must be mirrored into the
+    # x-goog-request-params routing header.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_iam_policy), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_get_iam_policy_from_dict():
+    # GetIamPolicy also accepts a plain dict in place of the request proto.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.get_iam_policy(
+            request={
+                "resource": "resource_value",
+                "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy()
+        )
+
+        response = await client.get_iam_policy(
+            request={
+                "resource": "resource_value",
+                "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+def test_test_iam_permissions(transport: str = "grpc"):
+    # TestIamPermissions (sync mixin): stub invoked once; the permissions list
+    # from the mocked response round-trips to the caller.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=["permissions_value"],
+        )
+
+        response = client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+
+    assert response.permissions == ["permissions_value"]
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DataMigrationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-quotas/v1/.coveragerc b/owl-bot-staging/google-cloud-quotas/v1/.coveragerc new file mode 100644 index 000000000000..ed4665249a5f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/cloudquotas/__init__.py + google/cloud/cloudquotas/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-quotas/v1/.flake8 b/owl-bot-staging/google-cloud-quotas/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- 
+# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-quotas/v1/MANIFEST.in b/owl-bot-staging/google-cloud-quotas/v1/MANIFEST.in new file mode 100644 index 000000000000..1d98fda4d355 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/cloudquotas *.py +recursive-include google/cloud/cloudquotas_v1 *.py diff --git a/owl-bot-staging/google-cloud-quotas/v1/README.rst b/owl-bot-staging/google-cloud-quotas/v1/README.rst new file mode 100644 index 000000000000..ac6de5807fc8 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Cloudquotas API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Cloudquotas API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-quotas/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/cloud_quotas.rst b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/cloud_quotas.rst new file mode 100644 index 000000000000..eebe1aab60fb --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/cloud_quotas.rst @@ -0,0 +1,10 @@ +CloudQuotas +----------------------------- + +.. automodule:: google.cloud.cloudquotas_v1.services.cloud_quotas + :members: + :inherited-members: + +.. 
automodule:: google.cloud.cloudquotas_v1.services.cloud_quotas.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/services_.rst b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/services_.rst new file mode 100644 index 000000000000..2ed9c6a7e724 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Cloudquotas v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + cloud_quotas diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/types_.rst b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/types_.rst new file mode 100644 index 000000000000..6b22302fe873 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/cloudquotas_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Cloudquotas v1 API +========================================= + +.. automodule:: google.cloud.cloudquotas_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/conf.py b/owl-bot-staging/google-cloud-quotas/v1/docs/conf.py new file mode 100644 index 000000000000..ebcab37c9dac --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +# google-cloud-quotas documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. 
+project = u"google-cloud-quotas" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-quotas-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-quotas.tex", + u"google-cloud-quotas Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-quotas", + u"Google Cloud Cloudquotas Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-quotas", + u"google-cloud-quotas Documentation", + author, + "google-cloud-quotas", + "GAPIC library for Google Cloud Cloudquotas API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-quotas/v1/docs/index.rst b/owl-bot-staging/google-cloud-quotas/v1/docs/index.rst new file mode 100644 index 000000000000..ba79a220e63a --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + cloudquotas_v1/services_ + cloudquotas_v1/types_ diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/__init__.py new file mode 100644 index 000000000000..f50207fb9892 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/__init__.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.cloudquotas import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.cloudquotas_v1.services.cloud_quotas.client import CloudQuotasClient +from google.cloud.cloudquotas_v1.services.cloud_quotas.async_client import CloudQuotasAsyncClient + +from google.cloud.cloudquotas_v1.types.cloudquotas import CreateQuotaPreferenceRequest +from google.cloud.cloudquotas_v1.types.cloudquotas import GetQuotaInfoRequest +from google.cloud.cloudquotas_v1.types.cloudquotas import GetQuotaPreferenceRequest +from google.cloud.cloudquotas_v1.types.cloudquotas import ListQuotaInfosRequest +from google.cloud.cloudquotas_v1.types.cloudquotas import ListQuotaInfosResponse +from google.cloud.cloudquotas_v1.types.cloudquotas import ListQuotaPreferencesRequest +from google.cloud.cloudquotas_v1.types.cloudquotas import ListQuotaPreferencesResponse +from google.cloud.cloudquotas_v1.types.cloudquotas import UpdateQuotaPreferenceRequest +from google.cloud.cloudquotas_v1.types.resources import DimensionsInfo +from google.cloud.cloudquotas_v1.types.resources import QuotaConfig +from google.cloud.cloudquotas_v1.types.resources import QuotaDetails +from google.cloud.cloudquotas_v1.types.resources import QuotaIncreaseEligibility +from google.cloud.cloudquotas_v1.types.resources import QuotaInfo +from google.cloud.cloudquotas_v1.types.resources import QuotaPreference +from google.cloud.cloudquotas_v1.types.resources import RolloutInfo +from google.cloud.cloudquotas_v1.types.resources import QuotaSafetyCheck + +__all__ = 
('CloudQuotasClient', + 'CloudQuotasAsyncClient', + 'CreateQuotaPreferenceRequest', + 'GetQuotaInfoRequest', + 'GetQuotaPreferenceRequest', + 'ListQuotaInfosRequest', + 'ListQuotaInfosResponse', + 'ListQuotaPreferencesRequest', + 'ListQuotaPreferencesResponse', + 'UpdateQuotaPreferenceRequest', + 'DimensionsInfo', + 'QuotaConfig', + 'QuotaDetails', + 'QuotaIncreaseEligibility', + 'QuotaInfo', + 'QuotaPreference', + 'RolloutInfo', + 'QuotaSafetyCheck', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/gapic_version.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/py.typed b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/py.typed new file mode 100644 index 000000000000..13b6e7a7c797 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-quotas package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/__init__.py new file mode 100644 index 000000000000..6fa94c423d83 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_quotas import CloudQuotasClient +from .services.cloud_quotas import CloudQuotasAsyncClient + +from .types.cloudquotas import CreateQuotaPreferenceRequest +from .types.cloudquotas import GetQuotaInfoRequest +from .types.cloudquotas import GetQuotaPreferenceRequest +from .types.cloudquotas import ListQuotaInfosRequest +from .types.cloudquotas import ListQuotaInfosResponse +from .types.cloudquotas import ListQuotaPreferencesRequest +from .types.cloudquotas import ListQuotaPreferencesResponse +from .types.cloudquotas import UpdateQuotaPreferenceRequest +from .types.resources import DimensionsInfo +from .types.resources import QuotaConfig +from .types.resources import QuotaDetails +from .types.resources import QuotaIncreaseEligibility +from .types.resources import QuotaInfo +from .types.resources import QuotaPreference +from .types.resources import RolloutInfo +from .types.resources import 
QuotaSafetyCheck + +__all__ = ( + 'CloudQuotasAsyncClient', +'CloudQuotasClient', +'CreateQuotaPreferenceRequest', +'DimensionsInfo', +'GetQuotaInfoRequest', +'GetQuotaPreferenceRequest', +'ListQuotaInfosRequest', +'ListQuotaInfosResponse', +'ListQuotaPreferencesRequest', +'ListQuotaPreferencesResponse', +'QuotaConfig', +'QuotaDetails', +'QuotaIncreaseEligibility', +'QuotaInfo', +'QuotaPreference', +'QuotaSafetyCheck', +'RolloutInfo', +'UpdateQuotaPreferenceRequest', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_metadata.json new file mode 100644 index 000000000000..dde44e1b52f2 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.cloudquotas_v1", + "protoPackage": "google.api.cloudquotas.v1", + "schema": "1.0", + "services": { + "CloudQuotas": { + "clients": { + "grpc": { + "libraryClient": "CloudQuotasClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + "get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + "list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudQuotasAsyncClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + "get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + 
"list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + }, + "rest": { + "libraryClient": "CloudQuotasClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + "get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + "list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_version.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/py.typed b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/py.typed new file mode 100644 index 000000000000..13b6e7a7c797 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-quotas package uses inline types. diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py new file mode 100644 index 000000000000..266cc8849c9f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudQuotasClient +from .async_client import CloudQuotasAsyncClient + +__all__ = ( + 'CloudQuotasClient', + 'CloudQuotasAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py new file mode 100644 index 000000000000..510a15a65eea --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py @@ -0,0 +1,981 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.cloudquotas_v1.services.cloud_quotas import pagers +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudQuotasTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudQuotasGrpcAsyncIOTransport +from .client import CloudQuotasClient + + +class CloudQuotasAsyncClient: + """The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. 
+ - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + """ + + _client: CloudQuotasClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = CloudQuotasClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudQuotasClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CloudQuotasClient._DEFAULT_UNIVERSE + + quota_info_path = staticmethod(CloudQuotasClient.quota_info_path) + parse_quota_info_path = staticmethod(CloudQuotasClient.parse_quota_info_path) + quota_preference_path = staticmethod(CloudQuotasClient.quota_preference_path) + parse_quota_preference_path = staticmethod(CloudQuotasClient.parse_quota_preference_path) + common_billing_account_path = staticmethod(CloudQuotasClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudQuotasClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudQuotasClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudQuotasClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudQuotasClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudQuotasClient.parse_common_organization_path) + common_project_path = staticmethod(CloudQuotasClient.common_project_path) + parse_common_project_path = staticmethod(CloudQuotasClient.parse_common_project_path) + common_location_path = staticmethod(CloudQuotasClient.common_location_path) + parse_common_location_path = staticmethod(CloudQuotasClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasAsyncClient: The constructed client. + """ + return CloudQuotasClient.from_service_account_info.__func__(CloudQuotasAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasAsyncClient: The constructed client. + """ + return CloudQuotasClient.from_service_account_file.__func__(CloudQuotasAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudQuotasClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudQuotasTransport: + """Returns the transport used by the client instance. + + Returns: + CloudQuotasTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = CloudQuotasClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudQuotasTransport, Callable[..., CloudQuotasTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud quotas async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,CloudQuotasTransport,Callable[..., CloudQuotasTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CloudQuotasTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudQuotasClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_quota_infos(self, + request: Optional[Union[cloudquotas.ListQuotaInfosRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaInfosAsyncPager: + r"""Lists QuotaInfos of all quotas for a given project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest, dict]]): + The request object. Message for requesting list of + QuotaInfos + parent (:class:`str`): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as + 'projects/-') is not allowed. 
+ + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosAsyncPager: + Message for response to listing + QuotaInfos + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.ListQuotaInfosRequest): + request = cloudquotas.ListQuotaInfosRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_quota_infos] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQuotaInfosAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_quota_info(self, + request: Optional[Union[cloudquotas.GetQuotaInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaInfo: + r"""Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest, dict]]): + The request object. 
Message for getting a QuotaInfo + name (:class:`str`): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.GetQuotaInfoRequest): + request = cloudquotas.GetQuotaInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_quota_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_quota_preferences(self, + request: Optional[Union[cloudquotas.ListQuotaPreferencesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaPreferencesAsyncPager: + r"""Lists QuotaPreferences in a given project, folder or + organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest, dict]]): + The request object. Message for requesting list of + QuotaPreferences + parent (:class:`str`): + Required. Parent value of QuotaPreference resources. + Listing across different resource containers (such as + 'projects/-') is not allowed. + + When the value starts with 'folders' or 'organizations', + it lists the QuotaPreferences for org quotas in the + container. 
It does not list the QuotaPreferences in the + descendant projects of the container. + + Example parents: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesAsyncPager: + Message for response to listing + QuotaPreferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.ListQuotaPreferencesRequest): + request = cloudquotas.ListQuotaPreferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_quota_preferences] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQuotaPreferencesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_quota_preference(self, + request: Optional[Union[cloudquotas.GetQuotaPreferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Gets details of a single QuotaPreference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest, dict]]): + The request object. 
Message for getting a QuotaPreference + name (:class:`str`): + Required. Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.GetQuotaPreferenceRequest): + request = cloudquotas.GetQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_quota_preference(self, + request: Optional[Union[cloudquotas.CreateQuotaPreferenceRequest, dict]] = None, + *, + parent: Optional[str] = None, + quota_preference: Optional[resources.QuotaPreference] = None, + quota_preference_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Creates a new QuotaPreference that declares the + desired value for a quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = await client.create_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest, dict]]): + The request object. Message for creating a + QuotaPreference + parent (:class:`str`): + Required. Value for parent. + + Example: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference (:class:`google.cloud.cloudquotas_v1.types.QuotaPreference`): + Required. The resource being created + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference_id (:class:`str`): + Optional. Id of the requesting + object, must be unique under its parent. + If client does not set this field, the + service will generate one. + + This corresponds to the ``quota_preference_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, quota_preference, quota_preference_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.CreateQuotaPreferenceRequest): + request = cloudquotas.CreateQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if quota_preference is not None: + request.quota_preference = quota_preference + if quota_preference_id is not None: + request.quota_preference_id = quota_preference_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_quota_preference(self, + request: Optional[Union[cloudquotas.UpdateQuotaPreferenceRequest, dict]] = None, + *, + quota_preference: Optional[resources.QuotaPreference] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = await client.update_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest, dict]]): + The request object. 
Message for updating a + QuotaPreference + quota_preference (:class:`google.cloud.cloudquotas_v1.types.QuotaPreference`): + Required. The resource being updated + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([quota_preference, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cloudquotas.UpdateQuotaPreferenceRequest): + request = cloudquotas.UpdateQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if quota_preference is not None: + request.quota_preference = quota_preference + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("quota_preference.name", request.quota_preference.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CloudQuotasAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudQuotasAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py new file mode 100644 index 000000000000..3d25ee56ca3f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py @@ -0,0 +1,1314 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.cloudquotas_v1.services.cloud_quotas import pagers +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudQuotasTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudQuotasGrpcTransport +from .transports.grpc_asyncio import CloudQuotasGrpcAsyncIOTransport +from .transports.rest import CloudQuotasRestTransport + + +class 
CloudQuotasClientMeta(type): + """Metaclass for the CloudQuotas client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudQuotasTransport]] + _transport_registry["grpc"] = CloudQuotasGrpcTransport + _transport_registry["grpc_asyncio"] = CloudQuotasGrpcAsyncIOTransport + _transport_registry["rest"] = CloudQuotasRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CloudQuotasTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudQuotasClient(metaclass=CloudQuotasClientMeta): + """The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "cloudquotas.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "cloudquotas.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudQuotasTransport: + """Returns the transport used by the client instance. + + Returns: + CloudQuotasTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def quota_info_path(project: str,location: str,service: str,quota_info: str,) -> str: + """Returns a fully-qualified quota_info string.""" + return "projects/{project}/locations/{location}/services/{service}/quotaInfos/{quota_info}".format(project=project, location=location, service=service, quota_info=quota_info, ) + + @staticmethod + def parse_quota_info_path(path: str) -> Dict[str,str]: + """Parses a quota_info path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/quotaInfos/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def quota_preference_path(project: str,location: str,quota_preference: str,) -> str: + """Returns a fully-qualified quota_preference string.""" + return "projects/{project}/locations/{location}/quotaPreferences/{quota_preference}".format(project=project, location=location, quota_preference=quota_preference, ) + + @staticmethod + def parse_quota_preference_path(path: str) -> Dict[str,str]: + """Parses a quota_preference path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/quotaPreferences/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a 
billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CloudQuotasClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = CloudQuotasClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = CloudQuotasClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. 
+ """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudQuotasTransport, Callable[..., CloudQuotasTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud quotas client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CloudQuotasTransport,Callable[..., CloudQuotasTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CloudQuotasTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudQuotasClient._read_environment_variables() + self._client_cert_source = CloudQuotasClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = CloudQuotasClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, CloudQuotasTransport) + if transport_provided: + # transport is a CloudQuotasTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(CloudQuotasTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CloudQuotasClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[CloudQuotasTransport], Callable[..., CloudQuotasTransport]] = ( + CloudQuotasClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CloudQuotasTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_quota_infos(self, + request: Optional[Union[cloudquotas.ListQuotaInfosRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaInfosPager: + r"""Lists QuotaInfos of all quotas for a given project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest, dict]): + The request object. Message for requesting list of + QuotaInfos + parent (str): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as + 'projects/-') is not allowed. + + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosPager: + Message for response to listing + QuotaInfos + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.ListQuotaInfosRequest): + request = cloudquotas.ListQuotaInfosRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_quota_infos] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQuotaInfosPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_quota_info(self, + request: Optional[Union[cloudquotas.GetQuotaInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaInfo: + r"""Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest, dict]): + The request object. Message for getting a QuotaInfo + name (str): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.GetQuotaInfoRequest): + request = cloudquotas.GetQuotaInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_quota_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_quota_preferences(self, + request: Optional[Union[cloudquotas.ListQuotaPreferencesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaPreferencesPager: + r"""Lists QuotaPreferences in a given project, folder or + organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest, dict]): + The request object. Message for requesting list of + QuotaPreferences + parent (str): + Required. Parent value of QuotaPreference resources. + Listing across different resource containers (such as + 'projects/-') is not allowed. + + When the value starts with 'folders' or 'organizations', + it lists the QuotaPreferences for org quotas in the + container. It does not list the QuotaPreferences in the + descendant projects of the container. + + Example parents: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesPager: + Message for response to listing + QuotaPreferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.ListQuotaPreferencesRequest): + request = cloudquotas.ListQuotaPreferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_quota_preferences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQuotaPreferencesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_quota_preference(self, + request: Optional[Union[cloudquotas.GetQuotaPreferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Gets details of a single QuotaPreference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest, dict]): + The request object. Message for getting a QuotaPreference + name (str): + Required. Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.GetQuotaPreferenceRequest): + request = cloudquotas.GetQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_quota_preference(self, + request: Optional[Union[cloudquotas.CreateQuotaPreferenceRequest, dict]] = None, + *, + parent: Optional[str] = None, + quota_preference: Optional[resources.QuotaPreference] = None, + quota_preference_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Creates a new QuotaPreference that declares the + desired value for a quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = client.create_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest, dict]): + The request object. Message for creating a + QuotaPreference + parent (str): + Required. Value for parent. 
+ + Example: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being created + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference_id (str): + Optional. Id of the requesting + object, must be unique under its parent. + If client does not set this field, the + service will generate one. + + This corresponds to the ``quota_preference_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, quota_preference, quota_preference_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cloudquotas.CreateQuotaPreferenceRequest): + request = cloudquotas.CreateQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if quota_preference is not None: + request.quota_preference = quota_preference + if quota_preference_id is not None: + request.quota_preference_id = quota_preference_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_quota_preference(self, + request: Optional[Union[cloudquotas.UpdateQuotaPreferenceRequest, dict]] = None, + *, + quota_preference: Optional[resources.QuotaPreference] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = client.update_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest, dict]): + The request object. Message for updating a + QuotaPreference + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being updated + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([quota_preference, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudquotas.UpdateQuotaPreferenceRequest): + request = cloudquotas.UpdateQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if quota_preference is not None: + request.quota_preference = quota_preference + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("quota_preference.name", request.quota_preference.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "CloudQuotasClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudQuotasClient", +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py new file mode 100644 index 000000000000..e58c99819f0e --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources + + +class ListQuotaInfosPager: + """A pager for iterating through ``list_quota_infos`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` object, and + provides an ``__iter__`` method to iterate through its + ``quota_infos`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQuotaInfos`` requests and continue to iterate + through the ``quota_infos`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudquotas.ListQuotaInfosResponse], + request: cloudquotas.ListQuotaInfosRequest, + response: cloudquotas.ListQuotaInfosResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudquotas.ListQuotaInfosRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudquotas.ListQuotaInfosResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.QuotaInfo]: + for page in self.pages: + yield from page.quota_infos + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQuotaInfosAsyncPager: + """A pager for iterating through ``list_quota_infos`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``quota_infos`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQuotaInfos`` requests and continue to iterate + through the ``quota_infos`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudquotas.ListQuotaInfosResponse]], + request: cloudquotas.ListQuotaInfosRequest, + response: cloudquotas.ListQuotaInfosResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudquotas.ListQuotaInfosRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudquotas.ListQuotaInfosResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.QuotaInfo]: + async def async_generator(): + async for page in self.pages: + for response in page.quota_infos: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQuotaPreferencesPager: + """A pager for iterating through ``list_quota_preferences`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``quota_preferences`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQuotaPreferences`` requests and continue to iterate + through the ``quota_preferences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., cloudquotas.ListQuotaPreferencesResponse], + request: cloudquotas.ListQuotaPreferencesRequest, + response: cloudquotas.ListQuotaPreferencesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudquotas.ListQuotaPreferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudquotas.ListQuotaPreferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.QuotaPreference]: + for page in self.pages: + yield from page.quota_preferences + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQuotaPreferencesAsyncPager: + """A pager for iterating through ``list_quota_preferences`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``quota_preferences`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQuotaPreferences`` requests and continue to iterate + through the ``quota_preferences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudquotas.ListQuotaPreferencesResponse]], + request: cloudquotas.ListQuotaPreferencesRequest, + response: cloudquotas.ListQuotaPreferencesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudquotas.ListQuotaPreferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudquotas.ListQuotaPreferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.QuotaPreference]: + async def async_generator(): + async for page in self.pages: + for response in page.quota_preferences: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/README.rst b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/README.rst new file mode 100644 index 000000000000..318b0588640f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CloudQuotasTransport` is the ABC for all transports. +- public child `CloudQuotasGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CloudQuotasGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCloudQuotasRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `CloudQuotasRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py new file mode 100644 index 000000000000..bf449d78f14f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudQuotasTransport +from .grpc import CloudQuotasGrpcTransport +from .grpc_asyncio import CloudQuotasGrpcAsyncIOTransport +from .rest import CloudQuotasRestTransport +from .rest import CloudQuotasRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CloudQuotasTransport]] +_transport_registry['grpc'] = CloudQuotasGrpcTransport +_transport_registry['grpc_asyncio'] = CloudQuotasGrpcAsyncIOTransport +_transport_registry['rest'] = CloudQuotasRestTransport + +__all__ = ( + 'CloudQuotasTransport', + 'CloudQuotasGrpcTransport', + 'CloudQuotasGrpcAsyncIOTransport', + 'CloudQuotasRestTransport', + 'CloudQuotasRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py new file mode 100644 index 000000000000..bb02e6cdece6 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class CloudQuotasTransport(abc.ABC): + """Abstract transport class for CloudQuotas.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'cloudquotas.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudquotas.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_quota_infos: gapic_v1.method.wrap_method( + self.list_quota_infos, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_info: gapic_v1.method.wrap_method( + self.get_quota_info, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_quota_preferences: gapic_v1.method.wrap_method( + self.list_quota_preferences, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_preference: gapic_v1.method.wrap_method( + self.get_quota_preference, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_quota_preference: gapic_v1.method.wrap_method( + self.create_quota_preference, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_quota_preference: gapic_v1.method.wrap_method( + self.update_quota_preference, + default_retry=retries.Retry( + initial=1.0, + 
maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_quota_infos(self) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + Union[ + cloudquotas.ListQuotaInfosResponse, + Awaitable[cloudquotas.ListQuotaInfosResponse] + ]]: + raise NotImplementedError() + + @property + def get_quota_info(self) -> Callable[ + [cloudquotas.GetQuotaInfoRequest], + Union[ + resources.QuotaInfo, + Awaitable[resources.QuotaInfo] + ]]: + raise NotImplementedError() + + @property + def list_quota_preferences(self) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + Union[ + cloudquotas.ListQuotaPreferencesResponse, + Awaitable[cloudquotas.ListQuotaPreferencesResponse] + ]]: + raise NotImplementedError() + + @property + def get_quota_preference(self) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], + Union[ + resources.QuotaPreference, + Awaitable[resources.QuotaPreference] + ]]: + raise NotImplementedError() + + @property + def create_quota_preference(self) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], + Union[ + resources.QuotaPreference, + Awaitable[resources.QuotaPreference] + ]]: + raise NotImplementedError() + + @property + def update_quota_preference(self) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], + Union[ + resources.QuotaPreference, + Awaitable[resources.QuotaPreference] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CloudQuotasTransport', +) diff --git 
a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py new file mode 100644 index 000000000000..bc566e7c8fcc --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources +from .base import CloudQuotasTransport, DEFAULT_CLIENT_INFO + + +class CloudQuotasGrpcTransport(CloudQuotasTransport): + """gRPC backend transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferences that declare the preferred + quota values.
+ - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudquotas.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A list of scopes.
This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_quota_infos(self) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + cloudquotas.ListQuotaInfosResponse]: + r"""Return a callable for the list quota infos method over gRPC. + + Lists QuotaInfos of all quotas for a given project, + folder or organization. + + Returns: + Callable[[~.ListQuotaInfosRequest], + ~.ListQuotaInfosResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_quota_infos' not in self._stubs: + self._stubs['list_quota_infos'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/ListQuotaInfos', + request_serializer=cloudquotas.ListQuotaInfosRequest.serialize, + response_deserializer=cloudquotas.ListQuotaInfosResponse.deserialize, + ) + return self._stubs['list_quota_infos'] + + @property + def get_quota_info(self) -> Callable[ + [cloudquotas.GetQuotaInfoRequest], + resources.QuotaInfo]: + r"""Return a callable for the get quota info method over gRPC. + + Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + Returns: + Callable[[~.GetQuotaInfoRequest], + ~.QuotaInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_quota_info' not in self._stubs: + self._stubs['get_quota_info'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/GetQuotaInfo', + request_serializer=cloudquotas.GetQuotaInfoRequest.serialize, + response_deserializer=resources.QuotaInfo.deserialize, + ) + return self._stubs['get_quota_info'] + + @property + def list_quota_preferences(self) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + cloudquotas.ListQuotaPreferencesResponse]: + r"""Return a callable for the list quota preferences method over gRPC. + + Lists QuotaPreferences in a given project, folder or + organization. + + Returns: + Callable[[~.ListQuotaPreferencesRequest], + ~.ListQuotaPreferencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_quota_preferences' not in self._stubs: + self._stubs['list_quota_preferences'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/ListQuotaPreferences', + request_serializer=cloudquotas.ListQuotaPreferencesRequest.serialize, + response_deserializer=cloudquotas.ListQuotaPreferencesResponse.deserialize, + ) + return self._stubs['list_quota_preferences'] + + @property + def get_quota_preference(self) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], + resources.QuotaPreference]: + r"""Return a callable for the get quota preference method over gRPC. + + Gets details of a single QuotaPreference. + + Returns: + Callable[[~.GetQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_quota_preference' not in self._stubs: + self._stubs['get_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/GetQuotaPreference', + request_serializer=cloudquotas.GetQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['get_quota_preference'] + + @property + def create_quota_preference(self) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], + resources.QuotaPreference]: + r"""Return a callable for the create quota preference method over gRPC. + + Creates a new QuotaPreference that declares the + desired value for a quota. + + Returns: + Callable[[~.CreateQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_quota_preference' not in self._stubs: + self._stubs['create_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/CreateQuotaPreference', + request_serializer=cloudquotas.CreateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['create_quota_preference'] + + @property + def update_quota_preference(self) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], + resources.QuotaPreference]: + r"""Return a callable for the update quota preference method over gRPC. + + Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + Returns: + Callable[[~.UpdateQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_quota_preference' not in self._stubs: + self._stubs['update_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/UpdateQuotaPreference', + request_serializer=cloudquotas.UpdateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['update_quota_preference'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'CloudQuotasGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c1507fb10c36 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources +from .base import CloudQuotasTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudQuotasGrpcTransport + + +class CloudQuotasGrpcAsyncIOTransport(CloudQuotasTransport): + """gRPC AsyncIO backend transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudquotas.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_quota_infos(self) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + Awaitable[cloudquotas.ListQuotaInfosResponse]]: + r"""Return a callable for the list quota infos method over gRPC. + + Lists QuotaInfos of all quotas for a given project, + folder or organization. + + Returns: + Callable[[~.ListQuotaInfosRequest], + Awaitable[~.ListQuotaInfosResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_quota_infos' not in self._stubs: + self._stubs['list_quota_infos'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/ListQuotaInfos', + request_serializer=cloudquotas.ListQuotaInfosRequest.serialize, + response_deserializer=cloudquotas.ListQuotaInfosResponse.deserialize, + ) + return self._stubs['list_quota_infos'] + + @property + def get_quota_info(self) -> Callable[ + [cloudquotas.GetQuotaInfoRequest], + Awaitable[resources.QuotaInfo]]: + r"""Return a callable for the get quota info method over gRPC. 
+ + Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + Returns: + Callable[[~.GetQuotaInfoRequest], + Awaitable[~.QuotaInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_quota_info' not in self._stubs: + self._stubs['get_quota_info'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/GetQuotaInfo', + request_serializer=cloudquotas.GetQuotaInfoRequest.serialize, + response_deserializer=resources.QuotaInfo.deserialize, + ) + return self._stubs['get_quota_info'] + + @property + def list_quota_preferences(self) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + Awaitable[cloudquotas.ListQuotaPreferencesResponse]]: + r"""Return a callable for the list quota preferences method over gRPC. + + Lists QuotaPreferences in a given project, folder or + organization. + + Returns: + Callable[[~.ListQuotaPreferencesRequest], + Awaitable[~.ListQuotaPreferencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_quota_preferences' not in self._stubs: + self._stubs['list_quota_preferences'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/ListQuotaPreferences', + request_serializer=cloudquotas.ListQuotaPreferencesRequest.serialize, + response_deserializer=cloudquotas.ListQuotaPreferencesResponse.deserialize, + ) + return self._stubs['list_quota_preferences'] + + @property + def get_quota_preference(self) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], + Awaitable[resources.QuotaPreference]]: + r"""Return a callable for the get quota preference method over gRPC. + + Gets details of a single QuotaPreference. + + Returns: + Callable[[~.GetQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_quota_preference' not in self._stubs: + self._stubs['get_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/GetQuotaPreference', + request_serializer=cloudquotas.GetQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['get_quota_preference'] + + @property + def create_quota_preference(self) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], + Awaitable[resources.QuotaPreference]]: + r"""Return a callable for the create quota preference method over gRPC. + + Creates a new QuotaPreference that declares the + desired value for a quota. + + Returns: + Callable[[~.CreateQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_quota_preference' not in self._stubs: + self._stubs['create_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/CreateQuotaPreference', + request_serializer=cloudquotas.CreateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['create_quota_preference'] + + @property + def update_quota_preference(self) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], + Awaitable[resources.QuotaPreference]]: + r"""Return a callable for the update quota preference method over gRPC. + + Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + Returns: + Callable[[~.UpdateQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_quota_preference' not in self._stubs: + self._stubs['update_quota_preference'] = self.grpc_channel.unary_unary( + '/google.api.cloudquotas.v1.CloudQuotas/UpdateQuotaPreference', + request_serializer=cloudquotas.UpdateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs['update_quota_preference'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_quota_infos: self._wrap_method( + self.list_quota_infos, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_info: self._wrap_method( + self.get_quota_info, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_quota_preferences: self._wrap_method( + self.list_quota_preferences, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_preference: self._wrap_method( + self.get_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_quota_preference: self._wrap_method( + self.create_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_quota_preference: self._wrap_method( + self.update_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'CloudQuotasGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py new file mode 100644 index 000000000000..4e7ec2434565 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py @@ -0,0 +1,837 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources + + +from .rest_base import _BaseCloudQuotasRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class CloudQuotasRestInterceptor: + """Interceptor for CloudQuotas. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudQuotasRestTransport. + + .. 
code-block:: python + class MyCustomCloudQuotasInterceptor(CloudQuotasRestInterceptor): + def pre_create_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_quota_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_quota_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_quota_infos(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_quota_infos(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_quota_preferences(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_quota_preferences(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudQuotasRestTransport(interceptor=MyCustomCloudQuotasInterceptor()) + client = CloudQuotasClient(transport=transport) + + + """ + def pre_create_quota_preference(self, request: cloudquotas.CreateQuotaPreferenceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.CreateQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_quota_preference + + Override in 
a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_create_quota_preference(self, response: resources.QuotaPreference) -> resources.QuotaPreference: + """Post-rpc interceptor for create_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_get_quota_info(self, request: cloudquotas.GetQuotaInfoRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.GetQuotaInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_quota_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_get_quota_info(self, response: resources.QuotaInfo) -> resources.QuotaInfo: + """Post-rpc interceptor for get_quota_info + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_get_quota_preference(self, request: cloudquotas.GetQuotaPreferenceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.GetQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_quota_preference + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_get_quota_preference(self, response: resources.QuotaPreference) -> resources.QuotaPreference: + """Post-rpc interceptor for get_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. 
+ """ + return response + + def pre_list_quota_infos(self, request: cloudquotas.ListQuotaInfosRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.ListQuotaInfosRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_quota_infos + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_list_quota_infos(self, response: cloudquotas.ListQuotaInfosResponse) -> cloudquotas.ListQuotaInfosResponse: + """Post-rpc interceptor for list_quota_infos + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_list_quota_preferences(self, request: cloudquotas.ListQuotaPreferencesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.ListQuotaPreferencesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_quota_preferences + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_list_quota_preferences(self, response: cloudquotas.ListQuotaPreferencesResponse) -> cloudquotas.ListQuotaPreferencesResponse: + """Post-rpc interceptor for list_quota_preferences + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_update_quota_preference(self, request: cloudquotas.UpdateQuotaPreferenceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudquotas.UpdateQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_quota_preference + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. 
+ """ + return request, metadata + + def post_update_quota_preference(self, response: resources.QuotaPreference) -> resources.QuotaPreference: + """Post-rpc interceptor for update_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudQuotasRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudQuotasRestInterceptor + + +class CloudQuotasRestTransport(_BaseCloudQuotasRestTransport): + """REST backend synchronous transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CloudQuotasRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudquotas.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudQuotasRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateQuotaPreference(_BaseCloudQuotasRestTransport._BaseCreateQuotaPreference, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.CreateQuotaPreference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: cloudquotas.CreateQuotaPreferenceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.QuotaPreference: + r"""Call the create quota preference method over HTTP. + + Args: + request (~.cloudquotas.CreateQuotaPreferenceRequest): + The request object. Message for creating a + QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + + http_options = _BaseCloudQuotasRestTransport._BaseCreateQuotaPreference._get_http_options() + request, metadata = self._interceptor.pre_create_quota_preference(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseCreateQuotaPreference._get_transcoded_request(http_options, request) + + body = _BaseCloudQuotasRestTransport._BaseCreateQuotaPreference._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseCreateQuotaPreference._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._CreateQuotaPreference._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_quota_preference(resp) + return resp + + class _GetQuotaInfo(_BaseCloudQuotasRestTransport._BaseGetQuotaInfo, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.GetQuotaInfo") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloudquotas.GetQuotaInfoRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.QuotaInfo: + r"""Call the get quota info method over HTTP. + + Args: + request (~.cloudquotas.GetQuotaInfoRequest): + The request object. Message for getting a QuotaInfo + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. 
+ + """ + + http_options = _BaseCloudQuotasRestTransport._BaseGetQuotaInfo._get_http_options() + request, metadata = self._interceptor.pre_get_quota_info(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseGetQuotaInfo._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseGetQuotaInfo._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._GetQuotaInfo._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaInfo() + pb_resp = resources.QuotaInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_quota_info(resp) + return resp + + class _GetQuotaPreference(_BaseCloudQuotasRestTransport._BaseGetQuotaPreference, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.GetQuotaPreference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloudquotas.GetQuotaPreferenceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.QuotaPreference: + r"""Call the get quota preference method 
over HTTP. + + Args: + request (~.cloudquotas.GetQuotaPreferenceRequest): + The request object. Message for getting a QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + + http_options = _BaseCloudQuotasRestTransport._BaseGetQuotaPreference._get_http_options() + request, metadata = self._interceptor.pre_get_quota_preference(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseGetQuotaPreference._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseGetQuotaPreference._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._GetQuotaPreference._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_quota_preference(resp) + return resp + + class _ListQuotaInfos(_BaseCloudQuotasRestTransport._BaseListQuotaInfos, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.ListQuotaInfos") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloudquotas.ListQuotaInfosRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudquotas.ListQuotaInfosResponse: + r"""Call the list quota infos method over HTTP. + + Args: + request (~.cloudquotas.ListQuotaInfosRequest): + The request object. Message for requesting list of + QuotaInfos + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloudquotas.ListQuotaInfosResponse: + Message for response to listing + QuotaInfos + + """ + + http_options = _BaseCloudQuotasRestTransport._BaseListQuotaInfos._get_http_options() + request, metadata = self._interceptor.pre_list_quota_infos(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseListQuotaInfos._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseListQuotaInfos._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._ListQuotaInfos._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudquotas.ListQuotaInfosResponse() + pb_resp = cloudquotas.ListQuotaInfosResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_quota_infos(resp) + return resp + + class _ListQuotaPreferences(_BaseCloudQuotasRestTransport._BaseListQuotaPreferences, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.ListQuotaPreferences") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloudquotas.ListQuotaPreferencesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + 
timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudquotas.ListQuotaPreferencesResponse: + r"""Call the list quota preferences method over HTTP. + + Args: + request (~.cloudquotas.ListQuotaPreferencesRequest): + The request object. Message for requesting list of + QuotaPreferences + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudquotas.ListQuotaPreferencesResponse: + Message for response to listing + QuotaPreferences + + """ + + http_options = _BaseCloudQuotasRestTransport._BaseListQuotaPreferences._get_http_options() + request, metadata = self._interceptor.pre_list_quota_preferences(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseListQuotaPreferences._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseListQuotaPreferences._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._ListQuotaPreferences._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudquotas.ListQuotaPreferencesResponse() + pb_resp = cloudquotas.ListQuotaPreferencesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_quota_preferences(resp) + return resp + + class _UpdateQuotaPreference(_BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference, CloudQuotasRestStub): + def __hash__(self): + return hash("CloudQuotasRestTransport.UpdateQuotaPreference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: cloudquotas.UpdateQuotaPreferenceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.QuotaPreference: + r"""Call the update quota preference method over HTTP. + + Args: + request (~.cloudquotas.UpdateQuotaPreferenceRequest): + The request object. Message for updating a + QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. 
+ There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + + http_options = _BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference._get_http_options() + request, metadata = self._interceptor.pre_update_quota_preference(request, metadata) + transcoded_request = _BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference._get_transcoded_request(http_options, request) + + body = _BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference._get_query_params_json(transcoded_request) + + # Send the request + response = CloudQuotasRestTransport._UpdateQuotaPreference._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_quota_preference(resp) + return resp + + @property + def create_quota_preference(self) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], + resources.QuotaPreference]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_quota_info(self) -> Callable[ + [cloudquotas.GetQuotaInfoRequest], + resources.QuotaInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetQuotaInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_quota_preference(self) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], + resources.QuotaPreference]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_quota_infos(self) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + cloudquotas.ListQuotaInfosResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQuotaInfos(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_quota_preferences(self) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + cloudquotas.ListQuotaPreferencesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQuotaPreferences(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_quota_preference(self) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], + resources.QuotaPreference]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CloudQuotasRestTransport', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest_base.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest_base.py new file mode 100644 index 000000000000..02594527d010 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest_base.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import CloudQuotasTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources + + +class _BaseCloudQuotasRestTransport(CloudQuotasTransport): + """Base REST backend transport for CloudQuotas. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'cloudquotas.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudquotas.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateQuotaPreference: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/quotaPreferences', + 'body': 'quota_preference', + }, + { + 'method': 'post', + 'uri': '/v1/{parent=folders/*/locations/*}/quotaPreferences', + 'body': 'quota_preference', + }, + { + 'method': 'post', + 'uri': '/v1/{parent=organizations/*/locations/*}/quotaPreferences', + 'body': 'quota_preference', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.CreateQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + 
use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseCreateQuotaPreference._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetQuotaInfo: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/services/*/quotaInfos/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/services/*/quotaInfos/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=folders/*/locations/*/services/*/quotaInfos/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.GetQuotaInfoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseGetQuotaInfo._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetQuotaPreference: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + 
def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/quotaPreferences/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/quotaPreferences/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=folders/*/locations/*/quotaPreferences/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.GetQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseGetQuotaPreference._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListQuotaInfos: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/services/*}/quotaInfos', + }, + { + 'method': 'get', + 'uri': '/v1/{parent=organizations/*/locations/*/services/*}/quotaInfos', + }, + { + 'method': 'get', + 'uri': '/v1/{parent=folders/*/locations/*/services/*}/quotaInfos', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.ListQuotaInfosRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return 
transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseListQuotaInfos._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListQuotaPreferences: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/quotaPreferences', + }, + { + 'method': 'get', + 'uri': '/v1/{parent=folders/*/locations/*}/quotaPreferences', + }, + { + 'method': 'get', + 'uri': '/v1/{parent=organizations/*/locations/*}/quotaPreferences', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.ListQuotaPreferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseListQuotaPreferences._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateQuotaPreference: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + 
+ @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{quota_preference.name=projects/*/locations/*/quotaPreferences/*}', + 'body': 'quota_preference', + }, + { + 'method': 'patch', + 'uri': '/v1/{quota_preference.name=folders/*/locations/*/quotaPreferences/*}', + 'body': 'quota_preference', + }, + { + 'method': 'patch', + 'uri': '/v1/{quota_preference.name=organizations/*/locations/*/quotaPreferences/*}', + 'body': 'quota_preference', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudquotas.UpdateQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCloudQuotasRestTransport._BaseUpdateQuotaPreference._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseCloudQuotasRestTransport', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/__init__.py new file mode 100644 index 000000000000..7d0886666b53 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/__init__.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudquotas import ( + CreateQuotaPreferenceRequest, + GetQuotaInfoRequest, + GetQuotaPreferenceRequest, + ListQuotaInfosRequest, + ListQuotaInfosResponse, + ListQuotaPreferencesRequest, + ListQuotaPreferencesResponse, + UpdateQuotaPreferenceRequest, +) +from .resources import ( + DimensionsInfo, + QuotaConfig, + QuotaDetails, + QuotaIncreaseEligibility, + QuotaInfo, + QuotaPreference, + RolloutInfo, + QuotaSafetyCheck, +) + +__all__ = ( + 'CreateQuotaPreferenceRequest', + 'GetQuotaInfoRequest', + 'GetQuotaPreferenceRequest', + 'ListQuotaInfosRequest', + 'ListQuotaInfosResponse', + 'ListQuotaPreferencesRequest', + 'ListQuotaPreferencesResponse', + 'UpdateQuotaPreferenceRequest', + 'DimensionsInfo', + 'QuotaConfig', + 'QuotaDetails', + 'QuotaIncreaseEligibility', + 'QuotaInfo', + 'QuotaPreference', + 'RolloutInfo', + 'QuotaSafetyCheck', +) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/cloudquotas.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/cloudquotas.py new file mode 100644 index 000000000000..67d4056c6a64 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/cloudquotas.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.cloudquotas_v1.types import resources +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.cloudquotas.v1', + manifest={ + 'ListQuotaInfosRequest', + 'ListQuotaInfosResponse', + 'GetQuotaInfoRequest', + 'ListQuotaPreferencesRequest', + 'ListQuotaPreferencesResponse', + 'GetQuotaPreferenceRequest', + 'CreateQuotaPreferenceRequest', + 'UpdateQuotaPreferenceRequest', + }, +) + + +class ListQuotaInfosRequest(proto.Message): + r"""Message for requesting list of QuotaInfos + + Attributes: + parent (str): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as 'projects/-') + is not allowed. + + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListQuotaInfosResponse(proto.Message): + r"""Message for response to listing QuotaInfos + + Attributes: + quota_infos (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaInfo]): + The list of QuotaInfo + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + quota_infos: MutableSequence[resources.QuotaInfo] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.QuotaInfo, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQuotaInfoRequest(proto.Message): + r"""Message for getting a QuotaInfo + + Attributes: + name (str): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListQuotaPreferencesRequest(proto.Message): + r"""Message for requesting list of QuotaPreferences + + Attributes: + parent (str): + Required. Parent value of QuotaPreference resources. Listing + across different resource containers (such as 'projects/-') + is not allowed. + + When the value starts with 'folders' or 'organizations', it + lists the QuotaPreferences for org quotas in the container. + It does not list the QuotaPreferences in the descendant + projects of the container. + + Example parents: ``projects/123/locations/global`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. 
A token identifying a page of + results the server should return. + filter (str): + Optional. Filter result QuotaPreferences by their state, + type, create/update time range. + + Example filters: + ``reconciling=true AND request_type=CLOUD_CONSOLE``, + ``reconciling=true OR creation_time>2022-12-03T10:30:00`` + order_by (str): + Optional. How to order of the results. By default, the + results are ordered by create time. + + Example orders: ``quota_id``, ``service, create_time`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListQuotaPreferencesResponse(proto.Message): + r"""Message for response to listing QuotaPreferences + + Attributes: + quota_preferences (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaPreference]): + The list of QuotaPreference + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + quota_preferences: MutableSequence[resources.QuotaPreference] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.QuotaPreference, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetQuotaPreferenceRequest(proto.Message): + r"""Message for getting a QuotaPreference + + Attributes: + name (str): + Required. 
Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateQuotaPreferenceRequest(proto.Message): + r"""Message for creating a QuotaPreference + + Attributes: + parent (str): + Required. Value for parent. + + Example: ``projects/123/locations/global`` + quota_preference_id (str): + Optional. Id of the requesting object, must + be unique under its parent. If client does not + set this field, the service will generate one. + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being created + ignore_safety_checks (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaSafetyCheck]): + The list of quota safety checks to be + ignored. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + quota_preference_id: str = proto.Field( + proto.STRING, + number=2, + ) + quota_preference: resources.QuotaPreference = proto.Field( + proto.MESSAGE, + number=3, + message=resources.QuotaPreference, + ) + ignore_safety_checks: MutableSequence[resources.QuotaSafetyCheck] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=resources.QuotaSafetyCheck, + ) + + +class UpdateQuotaPreferenceRequest(proto.Message): + r"""Message for updating a QuotaPreference + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being updated + allow_missing (bool): + Optional. 
If set to true, and the quota preference is not + found, a new one will be created. In this situation, + ``update_mask`` is ignored. + validate_only (bool): + Optional. If set to true, validate the + request, but do not actually update. Note that a + request being valid does not mean that the + request is guaranteed to be fulfilled. + ignore_safety_checks (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaSafetyCheck]): + The list of quota safety checks to be + ignored. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + quota_preference: resources.QuotaPreference = proto.Field( + proto.MESSAGE, + number=2, + message=resources.QuotaPreference, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + ignore_safety_checks: MutableSequence[resources.QuotaSafetyCheck] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=resources.QuotaSafetyCheck, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/resources.py b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/resources.py new file mode 100644 index 000000000000..1915c12e5b7b --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/google/cloud/cloudquotas_v1/types/resources.py @@ -0,0 +1,525 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.cloudquotas.v1', + manifest={ + 'QuotaSafetyCheck', + 'QuotaInfo', + 'QuotaIncreaseEligibility', + 'QuotaPreference', + 'QuotaConfig', + 'DimensionsInfo', + 'QuotaDetails', + 'RolloutInfo', + }, +) + + +class QuotaSafetyCheck(proto.Enum): + r"""Enumerations of quota safety checks. + + Values: + QUOTA_SAFETY_CHECK_UNSPECIFIED (0): + Unspecified quota safety check. + QUOTA_DECREASE_BELOW_USAGE (1): + Validates that a quota mutation would not + cause the consumer's effective limit to be lower + than the consumer's quota usage. + QUOTA_DECREASE_PERCENTAGE_TOO_HIGH (2): + Validates that a quota mutation would not + cause the consumer's effective limit to decrease + by more than 10 percent. + """ + QUOTA_SAFETY_CHECK_UNSPECIFIED = 0 + QUOTA_DECREASE_BELOW_USAGE = 1 + QUOTA_DECREASE_PERCENTAGE_TOO_HIGH = 2 + + +class QuotaInfo(proto.Message): + r"""QuotaInfo represents information about a particular quota for + a given project, folder or organization. + + Attributes: + name (str): + Resource name of this QuotaInfo. The ID component following + "locations/" must be "global". Example: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + quota_id (str): + The id of the quota, which is unquie within the service. + Example: ``CpusPerProjectPerRegion`` + metric (str): + The metric of the quota. It specifies the resources + consumption the quota is defined for. Example: + ``compute.googleapis.com/cpus`` + service (str): + The name of the service in which the quota is defined. 
+ Example: ``compute.googleapis.com`` + is_precise (bool): + Whether this is a precise quota. A precise + quota is tracked with absolute precision. In + contrast, an imprecise quota is not tracked with + precision. + refresh_interval (str): + The reset time interval for the quota. + Refresh interval applies to rate quota only. + Example: "minute" for per minute, "day" for per + day, or "10 seconds" for every 10 seconds. + container_type (google.cloud.cloudquotas_v1.types.QuotaInfo.ContainerType): + The container type of the QuotaInfo. + dimensions (MutableSequence[str]): + The dimensions the quota is defined on. + metric_display_name (str): + The display name of the quota metric + quota_display_name (str): + The display name of the quota. + metric_unit (str): + The unit in which the metric value is + reported, e.g., "MByte". + quota_increase_eligibility (google.cloud.cloudquotas_v1.types.QuotaIncreaseEligibility): + Whether it is eligible to request a higher + quota value for this quota. + is_fixed (bool): + Whether the quota value is fixed or + adjustable + dimensions_infos (MutableSequence[google.cloud.cloudquotas_v1.types.DimensionsInfo]): + The collection of dimensions info ordered by + their dimensions from more specific ones to less + specific ones. + is_concurrent (bool): + Whether the quota is a concurrent quota. + Concurrent quotas are enforced on the total + number of concurrent operations in flight at any + given time. + service_request_quota_uri (str): + URI to the page where users can request more + quota for the cloud service—for example, + https://console.cloud.google.com/iam-admin/quotas. + """ + class ContainerType(proto.Enum): + r"""The enumeration of the types of a cloud resource container. + + Values: + CONTAINER_TYPE_UNSPECIFIED (0): + Unspecified container type. 
+ PROJECT (1): + consumer project + FOLDER (2): + folder + ORGANIZATION (3): + organization + """ + CONTAINER_TYPE_UNSPECIFIED = 0 + PROJECT = 1 + FOLDER = 2 + ORGANIZATION = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + quota_id: str = proto.Field( + proto.STRING, + number=2, + ) + metric: str = proto.Field( + proto.STRING, + number=3, + ) + service: str = proto.Field( + proto.STRING, + number=4, + ) + is_precise: bool = proto.Field( + proto.BOOL, + number=5, + ) + refresh_interval: str = proto.Field( + proto.STRING, + number=6, + ) + container_type: ContainerType = proto.Field( + proto.ENUM, + number=7, + enum=ContainerType, + ) + dimensions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + metric_display_name: str = proto.Field( + proto.STRING, + number=9, + ) + quota_display_name: str = proto.Field( + proto.STRING, + number=10, + ) + metric_unit: str = proto.Field( + proto.STRING, + number=11, + ) + quota_increase_eligibility: 'QuotaIncreaseEligibility' = proto.Field( + proto.MESSAGE, + number=12, + message='QuotaIncreaseEligibility', + ) + is_fixed: bool = proto.Field( + proto.BOOL, + number=13, + ) + dimensions_infos: MutableSequence['DimensionsInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message='DimensionsInfo', + ) + is_concurrent: bool = proto.Field( + proto.BOOL, + number=15, + ) + service_request_quota_uri: str = proto.Field( + proto.STRING, + number=17, + ) + + +class QuotaIncreaseEligibility(proto.Message): + r"""Eligibility information regarding requesting increase + adjustment of a quota. + + Attributes: + is_eligible (bool): + Whether a higher quota value can be requested + for the quota. + ineligibility_reason (google.cloud.cloudquotas_v1.types.QuotaIncreaseEligibility.IneligibilityReason): + The reason of why it is ineligible to request increased + value of the quota. If the is_eligible field is true, it + defaults to INELIGIBILITY_REASON_UNSPECIFIED. 
+ """ + class IneligibilityReason(proto.Enum): + r"""The enumeration of reasons when it is ineligible to request + increase adjustment. + + Values: + INELIGIBILITY_REASON_UNSPECIFIED (0): + Default value when is_eligible is true. + NO_VALID_BILLING_ACCOUNT (1): + The container is not linked with a valid + billing account. + OTHER (2): + Other reasons. + """ + INELIGIBILITY_REASON_UNSPECIFIED = 0 + NO_VALID_BILLING_ACCOUNT = 1 + OTHER = 2 + + is_eligible: bool = proto.Field( + proto.BOOL, + number=1, + ) + ineligibility_reason: IneligibilityReason = proto.Field( + proto.ENUM, + number=2, + enum=IneligibilityReason, + ) + + +class QuotaPreference(proto.Message): + r"""QuotaPreference represents the preferred quota configuration + specified for a project, folder or organization. There is only + one QuotaPreference resource for a quota value targeting a + unique set of dimensions. + + Attributes: + name (str): + Required except in the CREATE requests. The resource name of + the quota preference. The ID component following + "locations/" must be "global". Example: + ``projects/123/locations/global/quotaPreferences/my-config-for-us-east1`` + dimensions (MutableMapping[str, str]): + Immutable. The dimensions that this quota preference applies + to. The key of the map entry is the name of a dimension, + such as "region", "zone", "network_id", and the value of the + map entry is the dimension value. + + If a dimension is missing from the map of dimensions, the + quota preference applies to all the dimension values except + for those that have other quota preferences configured for + the specific value. + + NOTE: QuotaPreferences can only be applied across all values + of "user" and "resource" dimension. Do not set values for + "user" or "resource" in the dimension map. + + Example: {"provider", "Foo Inc"} where "provider" is a + service specific dimension. + quota_config (google.cloud.cloudquotas_v1.types.QuotaConfig): + Required. Preferred quota configuration. 
+ etag (str): + Optional. The current etag of the quota + preference. If an etag is provided on update and + does not match the current server's etag of the + quota preference, the request will be blocked + and an ABORTED error will be returned. See + https://google.aip.dev/134#etags for more + details on etags. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + service (str): + Required. The name of the service to which + the quota preference is applied. + quota_id (str): + Required. The id of the quota to which the quota preference + is applied. A quota name is unique in the service. Example: + ``CpusPerProjectPerRegion`` + reconciling (bool): + Output only. Is the quota preference pending + Google Cloud approval and fulfillment. + justification (str): + The reason / justification for this quota + preference. + contact_email (str): + Input only. An email address that can be used to contact the + the user, in case Google Cloud needs more information to + make a decision before additional quota can be granted. + + When requesting a quota increase, the email address is + required. When requesting a quota decrease, the email + address is optional. For example, the email address is + optional when the ``QuotaConfig.preferred_value`` is smaller + than the ``QuotaDetails.reset_value``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + quota_config: 'QuotaConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='QuotaConfig', + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + service: str = proto.Field( + proto.STRING, + number=7, + ) + quota_id: str = proto.Field( + proto.STRING, + number=8, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=10, + ) + justification: str = proto.Field( + proto.STRING, + number=11, + ) + contact_email: str = proto.Field( + proto.STRING, + number=12, + ) + + +class QuotaConfig(proto.Message): + r"""The preferred quota configuration. + + Attributes: + preferred_value (int): + Required. The preferred value. Must be + greater than or equal to -1. If set to -1, it + means the value is "unlimited". + state_detail (str): + Output only. Optional details about the state + of this quota preference. + granted_value (google.protobuf.wrappers_pb2.Int64Value): + Output only. Granted quota value. + trace_id (str): + Output only. The trace id that the Google + Cloud uses to provision the requested quota. + This trace id may be used by the client to + contact Cloud support to track the state of a + quota preference request. The trace id is only + produced for increase requests and is unique for + each request. The quota decrease requests do not + have a trace id. + annotations (MutableMapping[str, str]): + Optional. The annotations map for clients to + store small amounts of arbitrary data. Do not + put PII or other sensitive information here. 
See + https://google.aip.dev/128#annotations + request_origin (google.cloud.cloudquotas_v1.types.QuotaConfig.Origin): + Output only. The origin of the quota + preference request. + """ + class Origin(proto.Enum): + r"""The enumeration of the origins of quota preference requests. + + Values: + ORIGIN_UNSPECIFIED (0): + The unspecified value. + CLOUD_CONSOLE (1): + Created through Cloud Console. + AUTO_ADJUSTER (2): + Generated by automatic quota adjustment. + """ + ORIGIN_UNSPECIFIED = 0 + CLOUD_CONSOLE = 1 + AUTO_ADJUSTER = 2 + + preferred_value: int = proto.Field( + proto.INT64, + number=1, + ) + state_detail: str = proto.Field( + proto.STRING, + number=2, + ) + granted_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.Int64Value, + ) + trace_id: str = proto.Field( + proto.STRING, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + request_origin: Origin = proto.Field( + proto.ENUM, + number=6, + enum=Origin, + ) + + +class DimensionsInfo(proto.Message): + r"""The detailed quota information such as effective quota value + for a combination of dimensions. + + Attributes: + dimensions (MutableMapping[str, str]): + The map of dimensions for this dimensions + info. The key of a map entry is "region", "zone" + or the name of a service specific dimension, and + the value of a map entry is the value of the + dimension. If a dimension does not appear in + the map of dimensions, the dimensions info + applies to all the dimension values except for + those that have another DimenisonInfo instance + configured for the specific value. + Example: {"provider" : "Foo Inc"} where + "provider" is a service specific dimension of a + quota. + details (google.cloud.cloudquotas_v1.types.QuotaDetails): + Quota details for the specified dimensions. + applicable_locations (MutableSequence[str]): + The applicable regions or zones of this dimensions info. 
The + field will be set to ['global'] for quotas that are not per + region or per zone. Otherwise, it will be set to the list of + locations this dimension info is applicable to. + """ + + dimensions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + details: 'QuotaDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='QuotaDetails', + ) + applicable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class QuotaDetails(proto.Message): + r"""The quota details for a map of dimensions. + + Attributes: + value (int): + The value currently in effect and being + enforced. + rollout_info (google.cloud.cloudquotas_v1.types.RolloutInfo): + Rollout information of this quota. + This field is present only if the effective + limit will change due to the ongoing rollout of + the service config. + """ + + value: int = proto.Field( + proto.INT64, + number=1, + ) + rollout_info: 'RolloutInfo' = proto.Field( + proto.MESSAGE, + number=3, + message='RolloutInfo', + ) + + +class RolloutInfo(proto.Message): + r"""[Output only] Rollout information of a quota. + + Attributes: + ongoing_rollout (bool): + Whether there is an ongoing rollout for a + quota or not. 
+ """ + + ongoing_rollout: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-quotas/v1/mypy.ini b/owl-bot-staging/google-cloud-quotas/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-quotas/v1/noxfile.py b/owl-bot-staging/google-cloud-quotas/v1/noxfile.py new file mode 100644 index 000000000000..cad1e6cd753d --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-quotas' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/cloudquotas_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/cloudquotas_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py new file mode 100644 index 000000000000..0b1aa7374af6 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = await client.create_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py new file mode 100644 index 000000000000..39ea14c0fc55 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = client.create_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py new file mode 100644 index 000000000000..900e5802d2d3 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_info(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py new file mode 100644 index 000000000000..c19d39c5463d --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_info(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py new file mode 100644 index 000000000000..4bfab812990d --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py new file mode 100644 index 000000000000..6e7916e6e241 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py new file mode 100644 index 000000000000..7419e713c04a --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaInfos +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py new file mode 100644 index 000000000000..367b27f88b54 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaInfos +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py new file mode 100644 index 000000000000..4b2155e02286 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py new file mode 100644 index 000000000000..aa7ef4ab3e66 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py new file mode 100644 index 000000000000..6b0d5c49ea26 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = await client.update_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py new file mode 100644 index 000000000000..d274dbfcd4c1 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = client.update_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync] diff --git a/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json new file mode 100644 index 000000000000..5c9889ca79cf --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json @@ -0,0 +1,1005 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.cloudquotas.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-quotas", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.create_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.CreateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + 
"shortName": "CreateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "quota_preference_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "create_quota_preference" + }, + "description": "Sample for CreateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.create_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.CreateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "CreateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "quota_preference_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "create_quota_preference" + }, + "description": "Sample for CreateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.get_quota_info", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaInfo", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaInfo", + "shortName": "get_quota_info" + }, + "description": "Sample for GetQuotaInfo", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.get_quota_info", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaInfo", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaInfo", + "shortName": "get_quota_info" + }, + "description": "Sample for GetQuotaInfo", + "file": 
"cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.get_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "get_quota_preference" + }, + "description": "Sample for GetQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 
51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.get_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "get_quota_preference" + }, + "description": "Sample for GetQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.list_quota_infos", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaInfos", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaInfos" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosAsyncPager", + "shortName": "list_quota_infos" + }, + "description": "Sample for ListQuotaInfos", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" 
+ }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.list_quota_infos", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaInfos", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaInfos" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosPager", + "shortName": "list_quota_infos" + }, + "description": "Sample for ListQuotaInfos", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.list_quota_preferences", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaPreferences", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": 
"CloudQuotas" + }, + "shortName": "ListQuotaPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesAsyncPager", + "shortName": "list_quota_preferences" + }, + "description": "Sample for ListQuotaPreferences", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.list_quota_preferences", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaPreferences", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesPager", + "shortName": "list_quota_preferences" + }, + "description": "Sample for ListQuotaPreferences", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.update_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.UpdateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "UpdateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "update_quota_preference" + }, + "description": "Sample for UpdateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.update_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.UpdateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "UpdateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "update_quota_preference" + }, + "description": "Sample for UpdateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-quotas/v1/scripts/fixup_cloudquotas_v1_keywords.py b/owl-bot-staging/google-cloud-quotas/v1/scripts/fixup_cloudquotas_v1_keywords.py new file mode 100644 index 000000000000..cbd5ea81f8ee --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/scripts/fixup_cloudquotas_v1_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class cloudquotasCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_quota_preference': ('parent', 'quota_preference', 'quota_preference_id', 'ignore_safety_checks', ), + 'get_quota_info': ('name', ), + 'get_quota_preference': ('name', ), + 'list_quota_infos': ('parent', 'page_size', 'page_token', ), + 'list_quota_preferences': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_quota_preference': ('quota_preference', 'update_mask', 'allow_missing', 'validate_only', 'ignore_safety_checks', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=cloudquotasCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the cloudquotas client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-quotas/v1/setup.py b/owl-bot-staging/google-cloud-quotas/v1/setup.py new file mode 100644 index 000000000000..7bcc831ee04f --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
import re

import setuptools # type: ignore

package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-quotas'


description = "Google Cloud Quotas API client library"

version = None

with open(os.path.join(package_root, 'google/cloud/cloudquotas/gapic_version.py')) as fp:
    # Extract the quoted semver string from gapic_version.py.  The dots are
    # escaped: the previous pattern used a bare `.` (any character) between
    # the digit groups, so strings like "1a2b3" would also have matched.
    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
    assert (len(version_candidates) == 1)
    version = version_candidates[0]

# Pre-1.0 packages are published as Beta; everything else as Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    # Exclude incompatible versions of `google-auth`
    # See https://github.com/googleapis/google-cloud-python/issues/12364
    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
    "proto-plus >= 1.22.3, <2.0.0dev",
    "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'",
    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
extras = {
}
url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas"

package_root = os.path.abspath(os.path.dirname(__file__))

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the `google` namespace packages; tests/samples/etc. stay out.
packages = [
    package
    for package in setuptools.find_namespace_packages()
    if package.startswith("google")
]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    install_requires=dependencies,
    extras_require=extras,
    include_package_data=True,
    zip_safe=False,
)
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-quotas/v1/tests/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-quotas/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/__init__.py b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py new file mode 100644 index 000000000000..2669364c479a --- /dev/null +++ b/owl-bot-staging/google-cloud-quotas/v1/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py @@ -0,0 +1,6270 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.cloudquotas_v1.services.cloud_quotas import CloudQuotasAsyncClient +from google.cloud.cloudquotas_v1.services.cloud_quotas import CloudQuotasClient +from google.cloud.cloudquotas_v1.services.cloud_quotas import pagers +from google.cloud.cloudquotas_v1.services.cloud_quotas import transports +from google.cloud.cloudquotas_v1.types import cloudquotas +from google.cloud.cloudquotas_v1.types import resources +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import google.auth + + +async def 
async def mock_async_gen(data, chunk_size=1):
    """Yield UTF-8 encoded slices of ``data``, one slice per character position.

    NOTE(review): the window advances one position at a time, so for
    chunk_size > 1 successive chunks overlap; presumably only the default
    chunk_size=1 is exercised — confirm before changing the stride.
    """
    position = 0
    while position < len(data):  # pragma: NO COVER
        piece = data[position:position + chunk_size]
        yield piece.encode("utf-8")
        position += 1

def client_cert_source_callback():
    """Static client certificate callback used by the mTLS tests."""
    return (b"cert bytes", b"key bytes")

# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    """Return anonymous credentials, async-flavored when google-auth provides them."""
    cred_cls = (
        ga_credentials_async.AnonymousCredentials
        if HAS_GOOGLE_AUTH_AIO
        else ga_credentials.AnonymousCredentials
    )
    return cred_cls()

# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Swap a localhost default endpoint for a distinct test endpoint."""
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT

# If default endpoint template is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint template so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    """Swap a localhost endpoint template for a distinct test template."""
    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts `.mtls` only into *.googleapis.com hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through; already-mtls endpoints are idempotent;
    # non-googleapis hosts are returned unchanged.
    assert CloudQuotasClient._get_default_mtls_endpoint(None) is None
    assert CloudQuotasClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert CloudQuotasClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert CloudQuotasClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert CloudQuotasClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert CloudQuotasClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi

def test__read_environment_variables():
    """_read_environment_variables returns (use_client_cert, mtls_mode, universe_domain)."""
    # Default environment: no client cert, auto mTLS, default universe.
    assert CloudQuotasClient._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert CloudQuotasClient._read_environment_variables() == (True, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        assert CloudQuotasClient._read_environment_variables() == (False, "auto", None)

    # Anything other than "true"/"false" is a hard error.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            CloudQuotasClient._read_environment_variables()
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert CloudQuotasClient._read_environment_variables() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert CloudQuotasClient._read_environment_variables() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert CloudQuotasClient._read_environment_variables() == (False, "auto", None)

    # The mTLS mode is a closed enum: never/auto/always.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            CloudQuotasClient._read_environment_variables()
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert CloudQuotasClient._read_environment_variables() == (False, "auto", "foo.com")

def test__get_client_cert_source():
    """_get_client_cert_source honors the provided source, else falls back to ADC."""
    mock_provided_cert_source = mock.Mock()
    mock_default_cert_source = mock.Mock()

    # Second argument is the use_cert flag: when falsy, no source is returned.
    assert CloudQuotasClient._get_client_cert_source(None, False) is None
    assert CloudQuotasClient._get_client_cert_source(mock_provided_cert_source, False) is None
    assert CloudQuotasClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source

    # With no provided source, the ADC default cert source is used when available;
    # a provided source always wins over the default.
    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
            assert CloudQuotasClient._get_client_cert_source(None, True) is mock_default_cert_source
            assert CloudQuotasClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source

@mock.patch.object(CloudQuotasClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasClient))
@mock.patch.object(CloudQuotasAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasAsyncClient))
def test__get_api_endpoint():
    """_get_api_endpoint resolves (override, cert source, universe, mtls mode) to a host."""
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = CloudQuotasClient._DEFAULT_UNIVERSE
    default_endpoint = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # An explicit override wins; otherwise the mTLS endpoint is chosen when a
    # cert source exists (mode auto) or when mode is always.
    assert CloudQuotasClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
    assert CloudQuotasClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CloudQuotasClient.DEFAULT_MTLS_ENDPOINT
    assert CloudQuotasClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
    assert CloudQuotasClient._get_api_endpoint(None, None, default_universe, "always") == CloudQuotasClient.DEFAULT_MTLS_ENDPOINT
    assert CloudQuotasClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CloudQuotasClient.DEFAULT_MTLS_ENDPOINT
    assert CloudQuotasClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
    assert CloudQuotasClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint

    # mTLS with a non-default universe is rejected.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        CloudQuotasClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."


def test__get_universe_domain():
    """_get_universe_domain prefers the client option, then the env var, then the default."""
    client_universe_domain = "foo.com"
    universe_domain_env = "bar.com"

    assert CloudQuotasClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
    assert CloudQuotasClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
    assert CloudQuotasClient._get_universe_domain(None, None) == CloudQuotasClient._DEFAULT_UNIVERSE

    # Empty string is distinct from None and is rejected outright.
    with pytest.raises(ValueError) as excinfo:
        CloudQuotasClient._get_universe_domain("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudQuotasClient, "grpc"), + (CloudQuotasAsyncClient, "grpc_asyncio"), + (CloudQuotasClient, "rest"), +]) +def test_cloud_quotas_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudquotas.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudquotas.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CloudQuotasGrpcTransport, "grpc"), + (transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudQuotasRestTransport, "rest"), +]) +def test_cloud_quotas_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudQuotasClient, "grpc"), + (CloudQuotasAsyncClient, "grpc_asyncio"), + (CloudQuotasClient, "rest"), +]) +def test_cloud_quotas_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + 
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudquotas.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudquotas.googleapis.com' + ) + + +def test_cloud_quotas_client_get_transport_class(): + transport = CloudQuotasClient.get_transport_class() + available_transports = [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasRestTransport, + ] + assert transport in available_transports + + transport = CloudQuotasClient.get_transport_class("grpc") + assert transport == transports.CloudQuotasGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc"), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest"), +]) +@mock.patch.object(CloudQuotasClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasClient)) +@mock.patch.object(CloudQuotasAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasAsyncClient)) +def test_cloud_quotas_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(CloudQuotasClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudQuotasClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", "true"), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", "false"), + 
(CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", "true"), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", "false"), +]) +@mock.patch.object(CloudQuotasClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasClient)) +@mock.patch.object(CloudQuotasAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_quotas_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CloudQuotasClient, CloudQuotasAsyncClient +]) +@mock.patch.object(CloudQuotasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudQuotasClient)) +@mock.patch.object(CloudQuotasAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudQuotasAsyncClient)) +def test_cloud_quotas_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CloudQuotasClient, CloudQuotasAsyncClient +]) +@mock.patch.object(CloudQuotasClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasClient)) +@mock.patch.object(CloudQuotasAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudQuotasAsyncClient)) +def test_cloud_quotas_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CloudQuotasClient._DEFAULT_UNIVERSE + default_endpoint = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + 
mock_endpoint = CloudQuotasClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc"), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest"), +]) +def test_cloud_quotas_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", grpc_helpers), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", None), +]) +def test_cloud_quotas_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_cloud_quotas_client_client_options_from_dict(): + with mock.patch('google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudQuotasClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", grpc_helpers), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_cloud_quotas_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudquotas.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudquotas.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.ListQuotaInfosRequest, + dict, +]) +def test_list_quota_infos(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out 
the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse( + next_page_token='next_page_token_value', + ) + response = client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.ListQuotaInfosRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_quota_infos_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudquotas.ListQuotaInfosRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_quota_infos(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaInfosRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_quota_infos_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_quota_infos in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_quota_infos] = mock_rpc + request = {} + client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_quota_infos(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_quota_infos_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_quota_infos in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_quota_infos] = mock_rpc + + request = {} + await client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_quota_infos(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_quota_infos_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.ListQuotaInfosRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaInfosResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloudquotas.ListQuotaInfosRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_quota_infos_async_from_dict(): + await test_list_quota_infos_async(request_type=dict) + +def test_list_quota_infos_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaInfosRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + call.return_value = cloudquotas.ListQuotaInfosResponse() + client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_quota_infos_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaInfosRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaInfosResponse()) + await client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_quota_infos_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_quota_infos( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_quota_infos_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_quota_infos_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaInfosResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_quota_infos( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_quota_infos_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent='parent_value', + ) + + +def test_list_quota_infos_pager(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token='def', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_quota_infos(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaInfo) + for i in results) +def test_list_quota_infos_pages(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token='def', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + pages = list(client.list_quota_infos(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_quota_infos_async_pager(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token='def', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_quota_infos(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.QuotaInfo) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_quota_infos_async_pages(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token='def', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_quota_infos(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudquotas.GetQuotaInfoRequest, + dict, +]) +def test_get_quota_info(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaInfo( + name='name_value', + quota_id='quota_id_value', + metric='metric_value', + service='service_value', + is_precise=True, + refresh_interval='refresh_interval_value', + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=['dimensions_value'], + metric_display_name='metric_display_name_value', + quota_display_name='quota_display_name_value', + metric_unit='metric_unit_value', + is_fixed=True, + is_concurrent=True, + service_request_quota_uri='service_request_quota_uri_value', + ) + response = client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.GetQuotaInfoRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaInfo) + assert response.name == 'name_value' + assert response.quota_id == 'quota_id_value' + assert response.metric == 'metric_value' + assert response.service == 'service_value' + assert response.is_precise is True + assert response.refresh_interval == 'refresh_interval_value' + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ['dimensions_value'] + assert response.metric_display_name == 'metric_display_name_value' + assert response.quota_display_name == 'quota_display_name_value' + assert response.metric_unit == 'metric_unit_value' + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == 'service_request_quota_uri_value' + + +def test_get_quota_info_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudquotas.GetQuotaInfoRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_quota_info(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaInfoRequest( + name='name_value', + ) + +def test_get_quota_info_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_quota_info in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_quota_info] = mock_rpc + request = {} + client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_quota_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_quota_info_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_quota_info in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_quota_info] = mock_rpc + + request = {} + await client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_quota_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_quota_info_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.GetQuotaInfoRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.get_quota_info),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo(
+ name='name_value',
+ quota_id='quota_id_value',
+ metric='metric_value',
+ service='service_value',
+ is_precise=True,
+ refresh_interval='refresh_interval_value',
+ container_type=resources.QuotaInfo.ContainerType.PROJECT,
+ dimensions=['dimensions_value'],
+ metric_display_name='metric_display_name_value',
+ quota_display_name='quota_display_name_value',
+ metric_unit='metric_unit_value',
+ is_fixed=True,
+ is_concurrent=True,
+ service_request_quota_uri='service_request_quota_uri_value',
+ ))
+ response = await client.get_quota_info(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = cloudquotas.GetQuotaInfoRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.QuotaInfo) + assert response.name == 'name_value' + assert response.quota_id == 'quota_id_value' + assert response.metric == 'metric_value' + assert response.service == 'service_value' + assert response.is_precise is True + assert response.refresh_interval == 'refresh_interval_value' + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ['dimensions_value'] + assert response.metric_display_name == 'metric_display_name_value' + assert response.quota_display_name == 'quota_display_name_value' + assert response.metric_unit == 'metric_unit_value' + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == 'service_request_quota_uri_value' + + +@pytest.mark.asyncio +async def test_get_quota_info_async_from_dict(): + await test_get_quota_info_async(request_type=dict) + +def test_get_quota_info_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaInfoRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + call.return_value = resources.QuotaInfo() + client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_quota_info_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaInfoRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo()) + await client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_quota_info_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_quota_info( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_quota_info_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_quota_info_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaInfo() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_quota_info( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_quota_info_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.ListQuotaPreferencesRequest, + dict, +]) +def test_list_quota_preferences(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.ListQuotaPreferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaPreferencesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_quota_preferences_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloudquotas.ListQuotaPreferencesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_quota_preferences(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaPreferencesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_quota_preferences_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_quota_preferences in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_quota_preferences] = mock_rpc + request = {} + client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_quota_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_quota_preferences in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_quota_preferences] = mock_rpc + + request = {} + await client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_quota_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_quota_preferences_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.ListQuotaPreferencesRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.list_quota_preferences),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaPreferencesResponse(
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.list_quota_preferences(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = cloudquotas.ListQuotaPreferencesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListQuotaPreferencesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+ @pytest.mark.asyncio
+ async def test_list_quota_preferences_async_from_dict():
+ await test_list_quota_preferences_async(request_type=dict)
+
+ def test_list_quota_preferences_field_headers():
+ client = CloudQuotasClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudquotas.ListQuotaPreferencesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_quota_preferences),
+ '__call__') as call:
+ call.return_value = cloudquotas.ListQuotaPreferencesResponse()
+ client.list_quota_preferences(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_quota_preferences_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaPreferencesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaPreferencesResponse()) + await client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_quota_preferences_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_quota_preferences( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_quota_preferences_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_quota_preferences_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaPreferencesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_quota_preferences( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_quota_preferences_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent='parent_value', + ) + + +def test_list_quota_preferences_pager(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token='def', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_quota_preferences(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaPreference) + for i in results) +def test_list_quota_preferences_pages(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token='def', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + pages = list(client.list_quota_preferences(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_pager(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token='def', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_quota_preferences(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.QuotaPreference) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_pages(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token='def', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_quota_preferences(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudquotas.GetQuotaPreferenceRequest, + dict, +]) +def test_get_quota_preference(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + response = client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.GetQuotaPreferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +def test_get_quota_preference_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudquotas.GetQuotaPreferenceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_quota_preference(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaPreferenceRequest( + name='name_value', + ) + +def test_get_quota_preference_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_quota_preference] = mock_rpc + request = {} + client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_quota_preference_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_quota_preference in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_quota_preference] = mock_rpc + + request = {} + await client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_quota_preference_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.GetQuotaPreferenceRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.get_quota_preference),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference(
+ name='name_value',
+ etag='etag_value',
+ service='service_value',
+ quota_id='quota_id_value',
+ reconciling=True,
+ justification='justification_value',
+ contact_email='contact_email_value',
+ ))
+ response = await client.get_quota_preference(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = cloudquotas.GetQuotaPreferenceRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.QuotaPreference)
+ assert response.name == 'name_value'
+ assert response.etag == 'etag_value'
+ assert response.service == 'service_value'
+ assert response.quota_id == 'quota_id_value'
+ assert response.reconciling is True
+ assert response.justification == 'justification_value'
+ assert response.contact_email == 'contact_email_value'
+
+
+ @pytest.mark.asyncio
+ async def test_get_quota_preference_async_from_dict():
+ await test_get_quota_preference_async(request_type=dict)
+
+ def test_get_quota_preference_field_headers():
+ client = CloudQuotasClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudquotas.GetQuotaPreferenceRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_quota_preference),
+ '__call__') as call:
+ call.return_value = resources.QuotaPreference()
+ client.get_quota_preference(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaPreferenceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + await client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_quota_preference( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_quota_preference( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.CreateQuotaPreferenceRequest, + dict, +]) +def test_create_quota_preference(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + response = client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.CreateQuotaPreferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +def test_create_quota_preference_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudquotas.CreateQuotaPreferenceRequest( + parent='parent_value', + quota_preference_id='quota_preference_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_quota_preference(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.CreateQuotaPreferenceRequest( + parent='parent_value', + quota_preference_id='quota_preference_id_value', + ) + +def test_create_quota_preference_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_quota_preference] = mock_rpc + request = {} + client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_quota_preference_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_quota_preference in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_quota_preference] = mock_rpc + + request = {} + await client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_quota_preference_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.CreateQuotaPreferenceRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + )) + response = await client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloudquotas.CreateQuotaPreferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +@pytest.mark.asyncio +async def test_create_quota_preference_async_from_dict(): + await test_create_quota_preference_async(request_type=dict) + +def test_create_quota_preference_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.CreateQuotaPreferenceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + call.return_value = resources.QuotaPreference() + client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.CreateQuotaPreferenceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + await client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_quota_preference( + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name='name_value') + assert arg == mock_val + arg = args[0].quota_preference_id + mock_val = 'quota_preference_id_value' + assert arg == mock_val + + +def test_create_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + +@pytest.mark.asyncio +async def test_create_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_quota_preference( + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name='name_value') + assert arg == mock_val + arg = args[0].quota_preference_id + mock_val = 'quota_preference_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.UpdateQuotaPreferenceRequest, + dict, +]) +def test_update_quota_preference(request_type, transport: str = 'grpc'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + response = client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudquotas.UpdateQuotaPreferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +def test_update_quota_preference_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudquotas.UpdateQuotaPreferenceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_quota_preference(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.UpdateQuotaPreferenceRequest( + ) + +def test_update_quota_preference_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_quota_preference] = mock_rpc + request = {} + client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_quota_preference_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_quota_preference in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_quota_preference] = mock_rpc + + request = {} + await client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_quota_preference_async(transport: str = 'grpc_asyncio', request_type=cloudquotas.UpdateQuotaPreferenceRequest): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + )) + response = await client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloudquotas.UpdateQuotaPreferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +@pytest.mark.asyncio +async def test_update_quota_preference_async_from_dict(): + await test_update_quota_preference_async(request_type=dict) + +def test_update_quota_preference_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.UpdateQuotaPreferenceRequest() + + request.quota_preference.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + call.return_value = resources.QuotaPreference() + client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'quota_preference.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.UpdateQuotaPreferenceRequest() + + request.quota_preference.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + await client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'quota_preference.name=name_value', + ) in kw['metadata'] + + +def test_update_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_quota_preference( + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_quota_preference( + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_list_quota_infos_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_quota_infos in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_quota_infos] = mock_rpc + + request = {} + client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_quota_infos(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_quota_infos_rest_required_fields(request_type=cloudquotas.ListQuotaInfosRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_quota_infos._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_quota_infos._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudquotas.ListQuotaInfosResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_quota_infos(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_quota_infos_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_quota_infos._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_quota_infos_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaInfosResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/services/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_quota_infos(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/services/*}/quotaInfos" % client.transport._host, args[1]) + + +def test_list_quota_infos_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent='parent_value', + ) + + +def test_list_quota_infos_rest_pager(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token='def', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudquotas.ListQuotaInfosResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/services/sample3'} + + pager = client.list_quota_infos(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaInfo) + for i in results) + + pages = list(client.list_quota_infos(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_quota_info_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been 
cached + assert client._transport.get_quota_info in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_quota_info] = mock_rpc + + request = {} + client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_quota_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_quota_info_rest_required_fields(request_type=cloudquotas.GetQuotaInfoRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_quota_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_quota_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_quota_info(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_quota_info_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_quota_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_quota_info_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaInfo() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_quota_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/services/*/quotaInfos/*}" % client.transport._host, args[1]) + + +def test_get_quota_info_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name='name_value', + ) + + +def test_list_quota_preferences_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_quota_preferences in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_quota_preferences] = mock_rpc + + request = {} + client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_quota_preferences(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_quota_preferences_rest_required_fields(request_type=cloudquotas.ListQuotaPreferencesRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_quota_preferences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_quota_preferences._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudquotas.ListQuotaPreferencesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_quota_preferences(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_quota_preferences_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_quota_preferences._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_quota_preferences_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaPreferencesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_quota_preferences(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/quotaPreferences" % client.transport._host, args[1]) + + +def test_list_quota_preferences_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent='parent_value', + ) + + +def test_list_quota_preferences_rest_pager(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token='abc', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token='def', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token='ghi', + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudquotas.ListQuotaPreferencesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_quota_preferences(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaPreference) + for i in results) + + pages = list(client.list_quota_preferences(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_quota_preference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client 
creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_quota_preference] = mock_rpc + + request = {} + client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_quota_preference_rest_required_fields(request_type=cloudquotas.GetQuotaPreferenceRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_quota_preference(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/quotaPreferences/*}" % client.transport._host, args[1]) + + +def test_get_quota_preference_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name='name_value', + ) + + +def test_create_quota_preference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_quota_preference] = mock_rpc + + request = {} + client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_quota_preference_rest_required_fields(request_type=cloudquotas.CreateQuotaPreferenceRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_quota_preference._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("ignore_safety_checks", "quota_preference_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_quota_preference(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == (set(("ignoreSafetyChecks", "quotaPreferenceId", )) & set(("parent", "quotaPreference", ))) + + +def test_create_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/quotaPreferences" % client.transport._host, args[1]) + + +def test_create_quota_preference_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent='parent_value', + quota_preference=resources.QuotaPreference(name='name_value'), + quota_preference_id='quota_preference_id_value', + ) + + +def test_update_quota_preference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_quota_preference in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_quota_preference] = mock_rpc + + request = {} + client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_quota_preference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_quota_preference_rest_required_fields(request_type=cloudquotas.UpdateQuotaPreferenceRequest): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_quota_preference._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "ignore_safety_checks", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_quota_preference(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "ignoreSafetyChecks", "updateMask", "validateOnly", )) & set(("quotaPreference", ))) + + +def test_update_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = {'quota_preference': {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{quota_preference.name=projects/*/locations/*/quotaPreferences/*}" % client.transport._host, args[1]) + + +def test_update_quota_preference_rest_flattened_error(transport: str = 'rest'): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudQuotasClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudQuotasGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + transports.CloudQuotasRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = CloudQuotasClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_quota_infos_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + call.return_value = cloudquotas.ListQuotaInfosResponse() + client.list_quota_infos(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaInfosRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_quota_info_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + call.return_value = resources.QuotaInfo() + client.get_quota_info(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_quota_preferences_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + client.list_quota_preferences(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaPreferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_quota_preference_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + call.return_value = resources.QuotaPreference() + client.get_quota_preference(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_quota_preference_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + call.return_value = resources.QuotaPreference() + client.create_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.CreateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_quota_preference_empty_call_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + call.return_value = resources.QuotaPreference() + client.update_quota_preference(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.UpdateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CloudQuotasAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_quota_infos_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaInfosResponse( + next_page_token='next_page_token_value', + )) + await client.list_quota_infos(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaInfosRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_quota_info_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo( + name='name_value', + quota_id='quota_id_value', + metric='metric_value', + service='service_value', + is_precise=True, + refresh_interval='refresh_interval_value', + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=['dimensions_value'], + metric_display_name='metric_display_name_value', + quota_display_name='quota_display_name_value', + metric_unit='metric_unit_value', + is_fixed=True, + is_concurrent=True, + service_request_quota_uri='service_request_quota_uri_value', + )) + await client.get_quota_info(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_quota_preferences_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudquotas.ListQuotaPreferencesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_quota_preferences(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaPreferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_quota_preference_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + )) + await client.get_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_quota_preference_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + )) + await client.create_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.CreateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_quota_preference_empty_call_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + )) + await client.update_quota_preference(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.UpdateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CloudQuotasClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_quota_infos_rest_bad_request(request_type=cloudquotas.ListQuotaInfosRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_quota_infos(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.ListQuotaInfosRequest, + dict, +]) +def test_list_quota_infos_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaInfosResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_quota_infos(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_quota_infos_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_list_quota_infos") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_list_quota_infos") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.ListQuotaInfosRequest.pb(cloudquotas.ListQuotaInfosRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloudquotas.ListQuotaInfosResponse.to_json(cloudquotas.ListQuotaInfosResponse()) + req.return_value.content = return_value + + request = cloudquotas.ListQuotaInfosRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudquotas.ListQuotaInfosResponse() + + client.list_quota_infos(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_quota_info_rest_bad_request(request_type=cloudquotas.GetQuotaInfoRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_quota_info(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.GetQuotaInfoRequest, + dict, +]) +def test_get_quota_info_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaInfo( + name='name_value', + quota_id='quota_id_value', + metric='metric_value', + service='service_value', + is_precise=True, + refresh_interval='refresh_interval_value', + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=['dimensions_value'], + metric_display_name='metric_display_name_value', + quota_display_name='quota_display_name_value', + metric_unit='metric_unit_value', + is_fixed=True, + is_concurrent=True, + service_request_quota_uri='service_request_quota_uri_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_quota_info(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaInfo) + assert response.name == 'name_value' + assert response.quota_id == 'quota_id_value' + assert response.metric == 'metric_value' + assert response.service == 'service_value' + assert response.is_precise is True + assert response.refresh_interval == 'refresh_interval_value' + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ['dimensions_value'] + assert response.metric_display_name == 'metric_display_name_value' + assert response.quota_display_name == 'quota_display_name_value' + assert response.metric_unit == 'metric_unit_value' + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == 'service_request_quota_uri_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_quota_info_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_get_quota_info") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_get_quota_info") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.GetQuotaInfoRequest.pb(cloudquotas.GetQuotaInfoRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = resources.QuotaInfo.to_json(resources.QuotaInfo()) + req.return_value.content = return_value + + request = cloudquotas.GetQuotaInfoRequest() + 
metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaInfo() + + client.get_quota_info(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_quota_preferences_rest_bad_request(request_type=cloudquotas.ListQuotaPreferencesRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_quota_preferences(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.ListQuotaPreferencesRequest, + dict, +]) +def test_list_quota_preferences_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaPreferencesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_quota_preferences(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaPreferencesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_quota_preferences_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_list_quota_preferences") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_list_quota_preferences") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.ListQuotaPreferencesRequest.pb(cloudquotas.ListQuotaPreferencesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
cloudquotas.ListQuotaPreferencesResponse.to_json(cloudquotas.ListQuotaPreferencesResponse()) + req.return_value.content = return_value + + request = cloudquotas.ListQuotaPreferencesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudquotas.ListQuotaPreferencesResponse() + + client.list_quota_preferences(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_quota_preference_rest_bad_request(request_type=cloudquotas.GetQuotaPreferenceRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_quota_preference(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.GetQuotaPreferenceRequest, + dict, +]) +def test_get_quota_preference_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_quota_preference(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_get_quota_preference") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_get_quota_preference") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = cloudquotas.GetQuotaPreferenceRequest.pb(cloudquotas.GetQuotaPreferenceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = resources.QuotaPreference.to_json(resources.QuotaPreference()) + req.return_value.content = return_value + + request = cloudquotas.GetQuotaPreferenceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.get_quota_preference(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_quota_preference_rest_bad_request(request_type=cloudquotas.CreateQuotaPreferenceRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_quota_preference(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.CreateQuotaPreferenceRequest, + dict, +]) +def test_create_quota_preference_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["quota_preference"] = {'name': 'name_value', 'dimensions': {}, 'quota_config': {'preferred_value': 1595, 'state_detail': 'state_detail_value', 'granted_value': {'value': 541}, 'trace_id': 'trace_id_value', 'annotations': {}, 'request_origin': 1}, 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'service': 'service_value', 'quota_id': 'quota_id_value', 'reconciling': True, 'justification': 'justification_value', 'contact_email': 'contact_email_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloudquotas.CreateQuotaPreferenceRequest.meta.fields["quota_preference"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["quota_preference"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["quota_preference"][field])): + del request_init["quota_preference"][field][i][subfield] + else: + del 
request_init["quota_preference"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_quota_preference(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_create_quota_preference") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_create_quota_preference") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.CreateQuotaPreferenceRequest.pb(cloudquotas.CreateQuotaPreferenceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = resources.QuotaPreference.to_json(resources.QuotaPreference()) + req.return_value.content = return_value + + request = cloudquotas.CreateQuotaPreferenceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.create_quota_preference(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_update_quota_preference_rest_bad_request(request_type=cloudquotas.UpdateQuotaPreferenceRequest): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'quota_preference': {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_quota_preference(request) + + +@pytest.mark.parametrize("request_type", [ + cloudquotas.UpdateQuotaPreferenceRequest, + dict, +]) +def test_update_quota_preference_rest_call_success(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'quota_preference': {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3'}} + request_init["quota_preference"] = {'name': 'projects/sample1/locations/sample2/quotaPreferences/sample3', 'dimensions': {}, 'quota_config': {'preferred_value': 1595, 'state_detail': 'state_detail_value', 'granted_value': {'value': 541}, 'trace_id': 'trace_id_value', 'annotations': {}, 'request_origin': 1}, 'etag': 'etag_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'service': 'service_value', 'quota_id': 'quota_id_value', 'reconciling': True, 'justification': 'justification_value', 'contact_email': 'contact_email_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloudquotas.UpdateQuotaPreferenceRequest.meta.fields["quota_preference"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["quota_preference"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["quota_preference"][field])): + del request_init["quota_preference"][field][i][subfield] + else: + del request_init["quota_preference"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name='name_value', + etag='etag_value', + service='service_value', + quota_id='quota_id_value', + reconciling=True, + justification='justification_value', + contact_email='contact_email_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_quota_preference(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == 'name_value' + assert response.etag == 'etag_value' + assert response.service == 'service_value' + assert response.quota_id == 'quota_id_value' + assert response.reconciling is True + assert response.justification == 'justification_value' + assert response.contact_email == 'contact_email_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "post_update_quota_preference") as post, \ + mock.patch.object(transports.CloudQuotasRestInterceptor, "pre_update_quota_preference") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.UpdateQuotaPreferenceRequest.pb(cloudquotas.UpdateQuotaPreferenceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = resources.QuotaPreference.to_json(resources.QuotaPreference()) + req.return_value.content = return_value + + request = cloudquotas.UpdateQuotaPreferenceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.update_quota_preference(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = 
CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_quota_infos_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), + '__call__') as call: + client.list_quota_infos(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaInfosRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_quota_info_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_info), + '__call__') as call: + client.get_quota_info(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_quota_preferences_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_quota_preferences), + '__call__') as call: + client.list_quota_preferences(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.ListQuotaPreferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_quota_preference_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), + '__call__') as call: + client.get_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.GetQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_quota_preference_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), + '__call__') as call: + client.create_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.CreateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_quota_preference_empty_call_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), + '__call__') as call: + client.update_quota_preference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudquotas.UpdateQuotaPreferenceRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudQuotasGrpcTransport, + ) + +def test_cloud_quotas_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudQuotasTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_quotas_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudQuotasTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_quota_infos', + 'get_quota_info', + 'list_quota_preferences', + 'get_quota_preference', + 'create_quota_preference', + 'update_quota_preference', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_quotas_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudQuotasTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_cloud_quotas_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudQuotasTransport() + adc.assert_called_once() + + +def test_cloud_quotas_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudQuotasClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + ], +) +def test_cloud_quotas_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + transports.CloudQuotasRestTransport, + ], +) +def test_cloud_quotas_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudQuotasGrpcTransport, grpc_helpers), + (transports.CloudQuotasGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def 
test_cloud_quotas_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudquotas.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudquotas.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport]) +def test_cloud_quotas_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_cloud_quotas_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudQuotasRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_quotas_host_no_port(transport_name): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudquotas.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudquotas.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudquotas.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_quotas_host_with_port(transport_name): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudquotas.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudquotas.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudquotas.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_cloud_quotas_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudQuotasClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudQuotasClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_quota_infos._session + session2 = client2.transport.list_quota_infos._session + assert session1 != session2 + session1 = client1.transport.get_quota_info._session + session2 = client2.transport.get_quota_info._session + assert session1 != session2 + session1 = client1.transport.list_quota_preferences._session + session2 = client2.transport.list_quota_preferences._session + assert session1 != session2 + session1 = client1.transport.get_quota_preference._session + session2 = client2.transport.get_quota_preference._session + assert session1 != session2 + session1 = client1.transport.create_quota_preference._session + session2 = client2.transport.create_quota_preference._session + assert session1 != session2 + session1 = client1.transport.update_quota_preference._session + session2 = client2.transport.update_quota_preference._session + assert session1 != session2 +def test_cloud_quotas_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudQuotasGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_quotas_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudQuotasGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport]) +def test_cloud_quotas_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio 
transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport]) +def test_cloud_quotas_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_quota_info_path(): + project = "squid" + location = "clam" + service = "whelk" + quota_info = "octopus" + expected = "projects/{project}/locations/{location}/services/{service}/quotaInfos/{quota_info}".format(project=project, location=location, service=service, quota_info=quota_info, ) + actual = CloudQuotasClient.quota_info_path(project, location, service, quota_info) + assert expected == actual + + +def test_parse_quota_info_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "service": "cuttlefish", + "quota_info": "mussel", + } + path = CloudQuotasClient.quota_info_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_quota_info_path(path) + assert expected == actual + +def test_quota_preference_path(): + project = "winkle" + location = "nautilus" + quota_preference = "scallop" + expected = "projects/{project}/locations/{location}/quotaPreferences/{quota_preference}".format(project=project, location=location, quota_preference=quota_preference, ) + actual = CloudQuotasClient.quota_preference_path(project, location, quota_preference) + assert expected == actual + + +def test_parse_quota_preference_path(): + expected = { + "project": "abalone", + "location": "squid", + "quota_preference": "clam", + } + path = CloudQuotasClient.quota_preference_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_quota_preference_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudQuotasClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudQuotasClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudQuotasClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudQuotasClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudQuotasClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudQuotasClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = CloudQuotasClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudQuotasClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudQuotasClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudQuotasClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudQuotasTransport, '_prep_wrapped_messages') as prep: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudQuotasTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudQuotasClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CloudQuotasAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-service-control/v1/.coveragerc b/owl-bot-staging/google-cloud-service-control/v1/.coveragerc new file mode 100644 index 000000000000..db7396937636 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/servicecontrol/__init__.py + google/cloud/servicecontrol/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-service-control/v1/.flake8 b/owl-bot-staging/google-cloud-service-control/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/.flake8 @@ -0,0 +1,33 @@ 
+# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-service-control/v1/MANIFEST.in b/owl-bot-staging/google-cloud-service-control/v1/MANIFEST.in new file mode 100644 index 000000000000..145db901aeb3 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/servicecontrol *.py +recursive-include google/cloud/servicecontrol_v1 *.py diff --git a/owl-bot-staging/google-cloud-service-control/v1/README.rst b/owl-bot-staging/google-cloud-service-control/v1/README.rst new file mode 100644 index 000000000000..68e48382eefb --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Servicecontrol API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Servicecontrol API. +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-service-control/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/conf.py b/owl-bot-staging/google-cloud-service-control/v1/docs/conf.py new file mode 100644 index 000000000000..583bcfefe02f --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-service-control documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-service-control" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-service-control-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-service-control.tex", + u"google-cloud-service-control Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-service-control", + u"Google Cloud Servicecontrol Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-service-control", + u"google-cloud-service-control Documentation", + author, + "google-cloud-service-control", + "GAPIC library for Google Cloud Servicecontrol API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/index.rst b/owl-bot-staging/google-cloud-service-control/v1/docs/index.rst new file mode 100644 index 000000000000..5db37e735efd --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + servicecontrol_v1/services_ + servicecontrol_v1/types_ diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/quota_controller.rst b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/quota_controller.rst new file mode 100644 index 000000000000..7f694a1d7318 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/quota_controller.rst @@ -0,0 +1,6 @@ +QuotaController +--------------------------------- + +.. 
automodule:: google.cloud.servicecontrol_v1.services.quota_controller + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/service_controller.rst b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/service_controller.rst new file mode 100644 index 000000000000..f3d8986da94d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/service_controller.rst @@ -0,0 +1,6 @@ +ServiceController +----------------------------------- + +.. automodule:: google.cloud.servicecontrol_v1.services.service_controller + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/services_.rst b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/services_.rst new file mode 100644 index 000000000000..779f197c4de3 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/services_.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Servicecontrol v1 API +=============================================== +.. toctree:: + :maxdepth: 2 + + quota_controller + service_controller diff --git a/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/types_.rst b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/types_.rst new file mode 100644 index 000000000000..eda22bef29db --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/docs/servicecontrol_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Servicecontrol v1 API +============================================ + +.. 
automodule:: google.cloud.servicecontrol_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/__init__.py new file mode 100644 index 000000000000..e69512a95115 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/__init__.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.servicecontrol import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.servicecontrol_v1.services.quota_controller.client import QuotaControllerClient +from google.cloud.servicecontrol_v1.services.quota_controller.async_client import QuotaControllerAsyncClient +from google.cloud.servicecontrol_v1.services.service_controller.client import ServiceControllerClient +from google.cloud.servicecontrol_v1.services.service_controller.async_client import ServiceControllerAsyncClient + +from google.cloud.servicecontrol_v1.types.check_error import CheckError +from google.cloud.servicecontrol_v1.types.distribution import Distribution +from google.cloud.servicecontrol_v1.types.http_request import HttpRequest +from google.cloud.servicecontrol_v1.types.log_entry import LogEntry +from google.cloud.servicecontrol_v1.types.log_entry import LogEntryOperation +from google.cloud.servicecontrol_v1.types.log_entry import LogEntrySourceLocation +from google.cloud.servicecontrol_v1.types.metric_value import MetricValue +from google.cloud.servicecontrol_v1.types.metric_value import MetricValueSet +from google.cloud.servicecontrol_v1.types.operation import Operation +from google.cloud.servicecontrol_v1.types.quota_controller import AllocateQuotaRequest +from google.cloud.servicecontrol_v1.types.quota_controller import AllocateQuotaResponse +from google.cloud.servicecontrol_v1.types.quota_controller import QuotaError +from google.cloud.servicecontrol_v1.types.quota_controller import QuotaOperation +from google.cloud.servicecontrol_v1.types.service_controller import CheckRequest +from google.cloud.servicecontrol_v1.types.service_controller import CheckResponse +from google.cloud.servicecontrol_v1.types.service_controller import ReportRequest +from google.cloud.servicecontrol_v1.types.service_controller import ReportResponse + +__all__ = ('QuotaControllerClient', + 'QuotaControllerAsyncClient', + 'ServiceControllerClient', 
+ 'ServiceControllerAsyncClient', + 'CheckError', + 'Distribution', + 'HttpRequest', + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'MetricValue', + 'MetricValueSet', + 'Operation', + 'AllocateQuotaRequest', + 'AllocateQuotaResponse', + 'QuotaError', + 'QuotaOperation', + 'CheckRequest', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/gapic_version.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/py.typed b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/py.typed new file mode 100644 index 000000000000..3971a5d2a8a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-control package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/__init__.py new file mode 100644 index 000000000000..cd126cdc8f78 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/__init__.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.quota_controller import QuotaControllerClient +from .services.quota_controller import QuotaControllerAsyncClient +from .services.service_controller import ServiceControllerClient +from .services.service_controller import ServiceControllerAsyncClient + +from .types.check_error import CheckError +from .types.distribution import Distribution +from .types.http_request import HttpRequest +from .types.log_entry import LogEntry +from .types.log_entry import LogEntryOperation +from .types.log_entry import LogEntrySourceLocation +from .types.metric_value import MetricValue +from .types.metric_value import MetricValueSet +from .types.operation import Operation +from .types.quota_controller import AllocateQuotaRequest +from .types.quota_controller import AllocateQuotaResponse +from .types.quota_controller import QuotaError +from .types.quota_controller import 
QuotaOperation +from .types.service_controller import CheckRequest +from .types.service_controller import CheckResponse +from .types.service_controller import ReportRequest +from .types.service_controller import ReportResponse + +__all__ = ( + 'QuotaControllerAsyncClient', + 'ServiceControllerAsyncClient', +'AllocateQuotaRequest', +'AllocateQuotaResponse', +'CheckError', +'CheckRequest', +'CheckResponse', +'Distribution', +'HttpRequest', +'LogEntry', +'LogEntryOperation', +'LogEntrySourceLocation', +'MetricValue', +'MetricValueSet', +'Operation', +'QuotaControllerClient', +'QuotaError', +'QuotaOperation', +'ReportRequest', +'ReportResponse', +'ServiceControllerClient', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_metadata.json new file mode 100644 index 000000000000..d275a5b4c724 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_metadata.json @@ -0,0 +1,92 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.servicecontrol_v1", + "protoPackage": "google.api.servicecontrol.v1", + "schema": "1.0", + "services": { + "QuotaController": { + "clients": { + "grpc": { + "libraryClient": "QuotaControllerClient", + "rpcs": { + "AllocateQuota": { + "methods": [ + "allocate_quota" + ] + } + } + }, + "grpc-async": { + "libraryClient": "QuotaControllerAsyncClient", + "rpcs": { + "AllocateQuota": { + "methods": [ + "allocate_quota" + ] + } + } + }, + "rest": { + "libraryClient": "QuotaControllerClient", + "rpcs": { + "AllocateQuota": { + "methods": [ + "allocate_quota" + ] + } + } + } + } + }, + "ServiceController": { + "clients": { + "grpc": { + "libraryClient": "ServiceControllerClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + 
"methods": [ + "report" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceControllerAsyncClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + "methods": [ + "report" + ] + } + } + }, + "rest": { + "libraryClient": "ServiceControllerClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + "methods": [ + "report" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_version.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/py.typed b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/py.typed new file mode 100644 index 000000000000..3971a5d2a8a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-control package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/__init__.py new file mode 100644 index 000000000000..3f6413ae07f4 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import QuotaControllerClient +from .async_client import QuotaControllerAsyncClient + +__all__ = ( + 'QuotaControllerClient', + 'QuotaControllerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py new file mode 100644 index 000000000000..95b9236ac178 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/async_client.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.servicecontrol_v1.types import metric_value +from google.cloud.servicecontrol_v1.types import quota_controller +from .transports.base import QuotaControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import QuotaControllerGrpcAsyncIOTransport +from .client import QuotaControllerClient + + +class QuotaControllerAsyncClient: + """`Google Quota Control API `__ + + Allows clients to allocate and release quota against a `managed + service `__. + """ + + _client: QuotaControllerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = QuotaControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = QuotaControllerClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = QuotaControllerClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod(QuotaControllerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(QuotaControllerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(QuotaControllerClient.common_folder_path) + parse_common_folder_path = staticmethod(QuotaControllerClient.parse_common_folder_path) + common_organization_path = staticmethod(QuotaControllerClient.common_organization_path) + parse_common_organization_path = staticmethod(QuotaControllerClient.parse_common_organization_path) + common_project_path = staticmethod(QuotaControllerClient.common_project_path) + parse_common_project_path = staticmethod(QuotaControllerClient.parse_common_project_path) + common_location_path = staticmethod(QuotaControllerClient.common_location_path) + parse_common_location_path = staticmethod(QuotaControllerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuotaControllerAsyncClient: The constructed client. + """ + return QuotaControllerClient.from_service_account_info.__func__(QuotaControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuotaControllerAsyncClient: The constructed client. + """ + return QuotaControllerClient.from_service_account_file.__func__(QuotaControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return QuotaControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> QuotaControllerTransport: + """Returns the transport used by the client instance. + + Returns: + QuotaControllerTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = QuotaControllerClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, QuotaControllerTransport, Callable[..., QuotaControllerTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the quota controller async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,QuotaControllerTransport,Callable[..., QuotaControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the QuotaControllerTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = QuotaControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def allocate_quota(self, + request: Optional[Union[quota_controller.AllocateQuotaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> quota_controller.AllocateQuotaResponse: + r"""Attempts to allocate quota for the specified consumer. It should + be called before the operation is executed. + + This method requires the ``servicemanagement.services.quota`` + permission on the specified service. For more information, see + `Cloud IAM `__. + + **NOTE:** The client **must** fail-open on server errors + ``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and + ``UNAVAILABLE``. To ensure system reliability, the server may + inject these errors to prohibit any hard dependency on the quota + functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + async def sample_allocate_quota(): + # Create a client + client = servicecontrol_v1.QuotaControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.AllocateQuotaRequest( + ) + + # Make the request + response = await client.allocate_quota(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicecontrol_v1.types.AllocateQuotaRequest, dict]]): + The request object. Request message for the AllocateQuota + method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.AllocateQuotaResponse: + Response message for the + AllocateQuota method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, quota_controller.AllocateQuotaRequest): + request = quota_controller.AllocateQuotaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.allocate_quota] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "QuotaControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "QuotaControllerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/client.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/client.py new file mode 100644 index 000000000000..bc9e1d4246e1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/client.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.servicecontrol_v1.types import metric_value +from google.cloud.servicecontrol_v1.types import quota_controller +from .transports.base import QuotaControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import QuotaControllerGrpcTransport +from .transports.grpc_asyncio import QuotaControllerGrpcAsyncIOTransport +from .transports.rest import QuotaControllerRestTransport + + +class QuotaControllerClientMeta(type): + """Metaclass for the QuotaController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[QuotaControllerTransport]] + _transport_registry["grpc"] = QuotaControllerGrpcTransport + _transport_registry["grpc_asyncio"] = QuotaControllerGrpcAsyncIOTransport + _transport_registry["rest"] = QuotaControllerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[QuotaControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class QuotaControllerClient(metaclass=QuotaControllerClientMeta): + """`Google Quota Control API `__ + + Allows clients to allocate and release quota against a `managed + service `__. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "servicecontrol.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "servicecontrol.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuotaControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuotaControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> QuotaControllerTransport: + """Returns the transport used by the client instance. + + Returns: + QuotaControllerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = QuotaControllerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = QuotaControllerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = QuotaControllerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, QuotaControllerTransport, Callable[..., QuotaControllerTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the quota controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,QuotaControllerTransport,Callable[..., QuotaControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the QuotaControllerTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = QuotaControllerClient._read_environment_variables() + self._client_cert_source = QuotaControllerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = QuotaControllerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, QuotaControllerTransport) + if transport_provided: + # transport is a QuotaControllerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(QuotaControllerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + QuotaControllerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[QuotaControllerTransport], Callable[..., QuotaControllerTransport]] = ( + QuotaControllerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., QuotaControllerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def allocate_quota(self, + request: Optional[Union[quota_controller.AllocateQuotaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> quota_controller.AllocateQuotaResponse: + r"""Attempts to allocate quota for the specified consumer. It should + be called before the operation is executed. + + This method requires the ``servicemanagement.services.quota`` + permission on the specified service. For more information, see + `Cloud IAM `__. 
+ + **NOTE:** The client **must** fail-open on server errors + ``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and + ``UNAVAILABLE``. To ensure system reliability, the server may + inject these errors to prohibit any hard dependency on the quota + functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + def sample_allocate_quota(): + # Create a client + client = servicecontrol_v1.QuotaControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.AllocateQuotaRequest( + ) + + # Make the request + response = client.allocate_quota(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicecontrol_v1.types.AllocateQuotaRequest, dict]): + The request object. Request message for the AllocateQuota + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.AllocateQuotaResponse: + Response message for the + AllocateQuota method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, quota_controller.AllocateQuotaRequest): + request = quota_controller.AllocateQuotaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.allocate_quota] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "QuotaControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "QuotaControllerClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/README.rst b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/README.rst new file mode 100644 index 000000000000..6311b281591e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`QuotaControllerTransport` is the ABC for all transports. 
+- public child `QuotaControllerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `QuotaControllerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseQuotaControllerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `QuotaControllerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/__init__.py new file mode 100644 index 000000000000..567270ab6ba0 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import QuotaControllerTransport +from .grpc import QuotaControllerGrpcTransport +from .grpc_asyncio import QuotaControllerGrpcAsyncIOTransport +from .rest import QuotaControllerRestTransport +from .rest import QuotaControllerRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[QuotaControllerTransport]] +_transport_registry['grpc'] = QuotaControllerGrpcTransport +_transport_registry['grpc_asyncio'] = QuotaControllerGrpcAsyncIOTransport +_transport_registry['rest'] = QuotaControllerRestTransport + +__all__ = ( + 'QuotaControllerTransport', + 'QuotaControllerGrpcTransport', + 'QuotaControllerGrpcAsyncIOTransport', + 'QuotaControllerRestTransport', + 'QuotaControllerRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/base.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/base.py new file mode 100644 index 000000000000..0bf52de7d1c3 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/base.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.servicecontrol_v1.types import quota_controller + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class QuotaControllerTransport(abc.ABC): + """Abstract transport class for QuotaController.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', + ) + + DEFAULT_HOST: str = 'servicecontrol.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.allocate_quota: gapic_v1.method.wrap_method( + self.allocate_quota, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def allocate_quota(self) -> Callable[ + [quota_controller.AllocateQuotaRequest], + Union[ + quota_controller.AllocateQuotaResponse, + Awaitable[quota_controller.AllocateQuotaResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'QuotaControllerTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc.py new file mode 100644 index 000000000000..ce2403825357 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.servicecontrol_v1.types import quota_controller +from .base import QuotaControllerTransport, DEFAULT_CLIENT_INFO + + +class QuotaControllerGrpcTransport(QuotaControllerTransport): + """gRPC backend transport for QuotaController. + + `Google Quota Control API `__ + + Allows clients to allocate and release quota against a `managed + service `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def allocate_quota(self) -> Callable[ + [quota_controller.AllocateQuotaRequest], + quota_controller.AllocateQuotaResponse]: + r"""Return a callable for the allocate quota method over gRPC. + + Attempts to allocate quota for the specified consumer. It should + be called before the operation is executed. + + This method requires the ``servicemanagement.services.quota`` + permission on the specified service. For more information, see + `Cloud IAM `__. 
+ + **NOTE:** The client **must** fail-open on server errors + ``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and + ``UNAVAILABLE``. To ensure system reliability, the server may + inject these errors to prohibit any hard dependency on the quota + functionality. + + Returns: + Callable[[~.AllocateQuotaRequest], + ~.AllocateQuotaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'allocate_quota' not in self._stubs: + self._stubs['allocate_quota'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.QuotaController/AllocateQuota', + request_serializer=quota_controller.AllocateQuotaRequest.serialize, + response_deserializer=quota_controller.AllocateQuotaResponse.deserialize, + ) + return self._stubs['allocate_quota'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'QuotaControllerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a51e155a9959 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/grpc_asyncio.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.servicecontrol_v1.types import quota_controller +from .base import QuotaControllerTransport, DEFAULT_CLIENT_INFO +from .grpc import QuotaControllerGrpcTransport + + +class QuotaControllerGrpcAsyncIOTransport(QuotaControllerTransport): + """gRPC AsyncIO backend transport for QuotaController. + + `Google Quota Control API `__ + + Allows clients to allocate and release quota against a `managed + service `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def allocate_quota(self) -> Callable[ + [quota_controller.AllocateQuotaRequest], + Awaitable[quota_controller.AllocateQuotaResponse]]: + r"""Return a callable for the allocate quota method over gRPC. + + Attempts to allocate quota for the specified consumer. It should + be called before the operation is executed. + + This method requires the ``servicemanagement.services.quota`` + permission on the specified service. For more information, see + `Cloud IAM `__. + + **NOTE:** The client **must** fail-open on server errors + ``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and + ``UNAVAILABLE``. To ensure system reliability, the server may + inject these errors to prohibit any hard dependency on the quota + functionality. + + Returns: + Callable[[~.AllocateQuotaRequest], + Awaitable[~.AllocateQuotaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'allocate_quota' not in self._stubs: + self._stubs['allocate_quota'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.QuotaController/AllocateQuota', + request_serializer=quota_controller.AllocateQuotaRequest.serialize, + response_deserializer=quota_controller.AllocateQuotaResponse.deserialize, + ) + return self._stubs['allocate_quota'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.allocate_quota: self._wrap_method( + self.allocate_quota, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'QuotaControllerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest.py new file mode 100644 index 000000000000..454e15cd8e52 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest.py @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.servicecontrol_v1.types import quota_controller + + +from .rest_base import _BaseQuotaControllerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class QuotaControllerRestInterceptor: + """Interceptor for QuotaController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the QuotaControllerRestTransport. + + .. code-block:: python + class MyCustomQuotaControllerInterceptor(QuotaControllerRestInterceptor): + def pre_allocate_quota(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_allocate_quota(self, response): + logging.log(f"Received response: {response}") + return response + + transport = QuotaControllerRestTransport(interceptor=MyCustomQuotaControllerInterceptor()) + client = QuotaControllerClient(transport=transport) + + + """ + def pre_allocate_quota(self, request: quota_controller.AllocateQuotaRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[quota_controller.AllocateQuotaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for allocate_quota + + Override in a subclass to manipulate the request or metadata + before they are sent to the QuotaController server. + """ + return request, metadata + + def post_allocate_quota(self, response: quota_controller.AllocateQuotaResponse) -> quota_controller.AllocateQuotaResponse: + """Post-rpc interceptor for allocate_quota + + Override in a subclass to manipulate the response + after it is returned by the QuotaController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class QuotaControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: QuotaControllerRestInterceptor + + +class QuotaControllerRestTransport(_BaseQuotaControllerRestTransport): + """REST backend synchronous transport for QuotaController. + + `Google Quota Control API `__ + + Allows clients to allocate and release quota against a `managed + service `__. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[QuotaControllerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or QuotaControllerRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AllocateQuota(_BaseQuotaControllerRestTransport._BaseAllocateQuota, QuotaControllerRestStub): + def __hash__(self): + return hash("QuotaControllerRestTransport.AllocateQuota") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: quota_controller.AllocateQuotaRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> quota_controller.AllocateQuotaResponse: + 
r"""Call the allocate quota method over HTTP. + + Args: + request (~.quota_controller.AllocateQuotaRequest): + The request object. Request message for the AllocateQuota + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.quota_controller.AllocateQuotaResponse: + Response message for the + AllocateQuota method. + + """ + + http_options = _BaseQuotaControllerRestTransport._BaseAllocateQuota._get_http_options() + request, metadata = self._interceptor.pre_allocate_quota(request, metadata) + transcoded_request = _BaseQuotaControllerRestTransport._BaseAllocateQuota._get_transcoded_request(http_options, request) + + body = _BaseQuotaControllerRestTransport._BaseAllocateQuota._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseQuotaControllerRestTransport._BaseAllocateQuota._get_query_params_json(transcoded_request) + + # Send the request + response = QuotaControllerRestTransport._AllocateQuota._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = quota_controller.AllocateQuotaResponse() + pb_resp = quota_controller.AllocateQuotaResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_allocate_quota(resp) + return resp + + @property + def allocate_quota(self) -> Callable[ + [quota_controller.AllocateQuotaRequest], + quota_controller.AllocateQuotaResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AllocateQuota(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'QuotaControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest_base.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest_base.py new file mode 100644 index 000000000000..29b10c635c99 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/quota_controller/transports/rest_base.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import QuotaControllerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.servicecontrol_v1.types import quota_controller + + +class _BaseQuotaControllerRestTransport(QuotaControllerTransport): + """Base REST backend transport for QuotaController. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseAllocateQuota: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}:allocateQuota', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = quota_controller.AllocateQuotaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseQuotaControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/__init__.py new file mode 100644 index 000000000000..919b41bcdebf --- /dev/null +++ 
b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ServiceControllerClient +from .async_client import ServiceControllerAsyncClient + +__all__ = ( + 'ServiceControllerClient', + 'ServiceControllerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/async_client.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/async_client.py new file mode 100644 index 000000000000..8dcc3d1d4687 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/async_client.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.servicecontrol_v1.types import check_error +from google.cloud.servicecontrol_v1.types import service_controller +from .transports.base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ServiceControllerGrpcAsyncIOTransport +from .client import ServiceControllerClient + + +class ServiceControllerAsyncClient: + """`Google Service Control API `__ + + Lets clients check and report operations against a `managed + service `__. + """ + + _client: ServiceControllerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ServiceControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ServiceControllerClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod(ServiceControllerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ServiceControllerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ServiceControllerClient.common_folder_path) + parse_common_folder_path = staticmethod(ServiceControllerClient.parse_common_folder_path) + common_organization_path = staticmethod(ServiceControllerClient.common_organization_path) + parse_common_organization_path = staticmethod(ServiceControllerClient.parse_common_organization_path) + common_project_path = staticmethod(ServiceControllerClient.common_project_path) + parse_common_project_path = staticmethod(ServiceControllerClient.parse_common_project_path) + common_location_path = staticmethod(ServiceControllerClient.common_location_path) + parse_common_location_path = staticmethod(ServiceControllerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerAsyncClient: The constructed client. + """ + return ServiceControllerClient.from_service_account_info.__func__(ServiceControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerAsyncClient: The constructed client. + """ + return ServiceControllerClient.from_service_account_file.__func__(ServiceControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ServiceControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServiceControllerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceControllerTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ServiceControllerClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceControllerTransport, Callable[..., ServiceControllerTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service controller async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceControllerTransport,Callable[..., ServiceControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceControllerTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServiceControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def check(self, + request: Optional[Union[service_controller.CheckRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.CheckResponse: + r"""Checks whether an operation on a service should be allowed to + proceed based on the configuration of the service and related + policies. It must be called before the operation is executed. + + If feasible, the client should cache the check results and reuse + them for 60 seconds. In case of any server errors, the client + should rely on the cached results for much longer time to avoid + outage. WARNING: There is general 60s delay for the + configuration and policy propagation, therefore callers MUST NOT + depend on the ``Check`` method having the latest policy + information. + + NOTE: the + [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Cloud IAM `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + async def sample_check(): + # Create a client + client = servicecontrol_v1.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.CheckRequest( + ) + + # Make the request + response = await client.check(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicecontrol_v1.types.CheckRequest, dict]]): + The request object. Request message for the Check method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.CheckResponse: + Response message for the Check + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.CheckRequest): + request = service_controller.CheckRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def report(self, + request: Optional[Union[service_controller.ReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.ReportResponse: + r"""Reports operation results to Google Service Control, such as + logs and metrics. It should be called after an operation is + completed. + + If feasible, the client should aggregate reporting data for up + to 5 seconds to reduce API traffic. Limiting aggregation to 5 + seconds is to reduce data loss during client crashes. Clients + should carefully choose the aggregation time window to avoid + data loss risk more than 0.01% for business and compliance + reasons. + + NOTE: the + [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Google Cloud IAM `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + async def sample_report(): + # Create a client + client = servicecontrol_v1.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.ReportRequest( + ) + + # Make the request + response = await client.report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicecontrol_v1.types.ReportRequest, dict]]): + The request object. 
Request message for the Report + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.ReportResponse: + Response message for the Report + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.ReportRequest): + request = service_controller.ReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ServiceControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceControllerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/client.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/client.py new file mode 100644 index 000000000000..595f9e43ccc8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/client.py @@ -0,0 +1,759 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.servicecontrol_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.servicecontrol_v1.types import check_error +from google.cloud.servicecontrol_v1.types import service_controller +from .transports.base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ServiceControllerGrpcTransport +from .transports.grpc_asyncio import ServiceControllerGrpcAsyncIOTransport +from .transports.rest import ServiceControllerRestTransport + + +class ServiceControllerClientMeta(type): + """Metaclass for the ServiceController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceControllerTransport]] + _transport_registry["grpc"] = ServiceControllerGrpcTransport + _transport_registry["grpc_asyncio"] = ServiceControllerGrpcAsyncIOTransport + _transport_registry["rest"] = ServiceControllerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ServiceControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceControllerClient(metaclass=ServiceControllerClientMeta): + """`Google Service Control API `__ + + Lets clients check and report operations against a `managed + service `__. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "servicecontrol.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "servicecontrol.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceControllerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceControllerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ServiceControllerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ServiceControllerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceControllerTransport, Callable[..., ServiceControllerTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceControllerTransport,Callable[..., ServiceControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceControllerTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ServiceControllerClient._read_environment_variables() + self._client_cert_source = ServiceControllerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = ServiceControllerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ServiceControllerTransport) + if transport_provided: + # transport is a ServiceControllerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ServiceControllerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ServiceControllerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ServiceControllerTransport], Callable[..., ServiceControllerTransport]] = ( + ServiceControllerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ServiceControllerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def check(self, + request: Optional[Union[service_controller.CheckRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.CheckResponse: + r"""Checks whether an operation on a service should be allowed to + proceed based on the configuration of the service and related + policies. It must be called before the operation is executed. + + If feasible, the client should cache the check results and reuse + them for 60 seconds. 
In case of any server errors, the client + should rely on the cached results for much longer time to avoid + outage. WARNING: There is general 60s delay for the + configuration and policy propagation, therefore callers MUST NOT + depend on the ``Check`` method having the latest policy + information. + + NOTE: the + [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Cloud IAM `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + def sample_check(): + # Create a client + client = servicecontrol_v1.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.CheckRequest( + ) + + # Make the request + response = client.check(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicecontrol_v1.types.CheckRequest, dict]): + The request object. Request message for the Check method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.CheckResponse: + Response message for the Check + method. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.CheckRequest): + request = service_controller.CheckRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def report(self, + request: Optional[Union[service_controller.ReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.ReportResponse: + r"""Reports operation results to Google Service Control, such as + logs and metrics. It should be called after an operation is + completed. + + If feasible, the client should aggregate reporting data for up + to 5 seconds to reduce API traffic. Limiting aggregation to 5 + seconds is to reduce data loss during client crashes. Clients + should carefully choose the aggregation time window to avoid + data loss risk more than 0.01% for business and compliance + reasons. + + NOTE: the + [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Google Cloud IAM `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v1 + + def sample_report(): + # Create a client + client = servicecontrol_v1.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.ReportRequest( + ) + + # Make the request + response = client.report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicecontrol_v1.types.ReportRequest, dict]): + The request object. Request message for the Report + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v1.types.ReportResponse: + Response message for the Report + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.ReportRequest): + request = service_controller.ReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.report] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ServiceControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceControllerClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/README.rst b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/README.rst new file mode 100644 index 000000000000..ab03c3a4d96b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ServiceControllerTransport` is the ABC for all transports. +- public child `ServiceControllerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ServiceControllerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseServiceControllerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import ServiceControllerTransport
from .grpc import ServiceControllerGrpcTransport
from .grpc_asyncio import ServiceControllerGrpcAsyncIOTransport
from .rest import ServiceControllerRestTransport
from .rest import ServiceControllerRestInterceptor


# Registry mapping each supported transport name ("grpc", "grpc_asyncio",
# "rest") to the transport class implementing it. Consumers look transports
# up here by name when constructing a client.
_transport_registry: Dict[str, Type[ServiceControllerTransport]] = OrderedDict(
    (
        ('grpc', ServiceControllerGrpcTransport),
        ('grpc_asyncio', ServiceControllerGrpcAsyncIOTransport),
        ('rest', ServiceControllerRestTransport),
    )
)

__all__ = (
    'ServiceControllerTransport',
    'ServiceControllerGrpcTransport',
    'ServiceControllerGrpcAsyncIOTransport',
    'ServiceControllerRestTransport',
    'ServiceControllerRestInterceptor',
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.servicecontrol_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.servicecontrol_v1.types import service_controller

# Default client metadata (user-agent info) stamped with this package's version.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class ServiceControllerTransport(abc.ABC):
    """Abstract transport class for ServiceController.

    Concrete subclasses (gRPC, gRPC-asyncio, REST) implement the ``check``
    and ``report`` RPCs; this base class handles credential resolution,
    scope defaults, and per-method retry/timeout wrapping.
    """

    # OAuth scopes requested when credentials are resolved from the environment.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/servicecontrol',
    )

    DEFAULT_HOST: str = 'servicecontrol.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'servicecontrol.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        # Explicit scopes win; AUTH_SCOPES is the fallback used by google.auth.
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses (e.g. a gRPC transport given a pre-built channel) may set
        # _ignore_credentials before calling super().__init__.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        # NOTE(review): the ':' check would misread a bare IPv6 literal as
        # already carrying a port — presumably hosts are always DNS names; confirm.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        # The resolved "host:port" endpoint this transport connects to.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # `check` retries on ServiceUnavailable with exponential backoff
        # (1s initial, x1.3, capped at 10s) under a 5s overall deadline;
        # `report` is not retried and uses a 16s timeout.
        self._wrapped_methods = {
            self.check: gapic_v1.method.wrap_method(
                self.check,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=5.0,
                ),
                default_timeout=5.0,
                client_info=client_info,
            ),
            self.report: gapic_v1.method.wrap_method(
                self.report,
                default_timeout=16.0,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def check(self) -> Callable[
            [service_controller.CheckRequest],
            Union[
                service_controller.CheckResponse,
                Awaitable[service_controller.CheckResponse]
            ]]:
        # Abstract: implemented by each concrete transport.
        raise NotImplementedError()

    @property
    def report(self) -> Callable[
            [service_controller.ReportRequest],
            Union[
                service_controller.ReportResponse,
                Awaitable[service_controller.ReportResponse]
            ]]:
        # Abstract: implemented by each concrete transport.
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" or "rest".
        raise NotImplementedError()


__all__ = (
    'ServiceControllerTransport',
)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore

from google.cloud.servicecontrol_v1.types import service_controller
from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO


class ServiceControllerGrpcTransport(ServiceControllerTransport):
    """gRPC backend transport for ServiceController.

    `Google Service Control API `__

    Lets clients check and report operations against a `managed
    service `__.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    # Cache of lazily-created per-RPC stub callables, keyed by method name.
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'servicecontrol.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
            api_mtls_endpoint: Optional[str] = None,
            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'servicecontrol.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if a ``channel`` instance is provided.
            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, grpc.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            # Deprecated path: api_mtls_endpoint overrides host and forces
            # an mTLS channel.
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                # Modern path: build channel credentials from the mTLS cert
                # callback unless explicit ssl_channel_credentials were given.
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    # Unlimited message sizes in both directions.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
            host: str = 'servicecontrol.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel
+ if 'check' not in self._stubs: + self._stubs['check'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.ServiceController/Check', + request_serializer=service_controller.CheckRequest.serialize, + response_deserializer=service_controller.CheckResponse.deserialize, + ) + return self._stubs['check'] + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + service_controller.ReportResponse]: + r"""Return a callable for the report method over gRPC. + + Reports operation results to Google Service Control, such as + logs and metrics. It should be called after an operation is + completed. + + If feasible, the client should aggregate reporting data for up + to 5 seconds to reduce API traffic. Limiting aggregation to 5 + seconds is to reduce data loss during client crashes. Clients + should carefully choose the aggregation time window to avoid + data loss risk more than 0.01% for business and compliance + reasons. + + NOTE: the + [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.ReportRequest], + ~.ReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'report' not in self._stubs: + self._stubs['report'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.ServiceController/Report', + request_serializer=service_controller.ReportRequest.serialize, + response_deserializer=service_controller.ReportResponse.deserialize, + ) + return self._stubs['report'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ServiceControllerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4b3875d1626c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/grpc_asyncio.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.servicecontrol_v1.types import service_controller +from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .grpc import ServiceControllerGrpcTransport + + +class ServiceControllerGrpcAsyncIOTransport(ServiceControllerTransport): + """gRPC AsyncIO backend transport for ServiceController. + + `Google Service Control API `__ + + Lets clients check and report operations against a `managed + service `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + Awaitable[service_controller.CheckResponse]]: + r"""Return a callable for the check method over gRPC. + + Checks whether an operation on a service should be allowed to + proceed based on the configuration of the service and related + policies. It must be called before the operation is executed. + + If feasible, the client should cache the check results and reuse + them for 60 seconds. In case of any server errors, the client + should rely on the cached results for much longer time to avoid + outage. WARNING: There is general 60s delay for the + configuration and policy propagation, therefore callers MUST NOT + depend on the ``Check`` method having the latest policy + information. + + NOTE: the + [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Cloud IAM `__. + + Returns: + Callable[[~.CheckRequest], + Awaitable[~.CheckResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'check' not in self._stubs: + self._stubs['check'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.ServiceController/Check', + request_serializer=service_controller.CheckRequest.serialize, + response_deserializer=service_controller.CheckResponse.deserialize, + ) + return self._stubs['check'] + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + Awaitable[service_controller.ReportResponse]]: + r"""Return a callable for the report method over gRPC. 
+ + Reports operation results to Google Service Control, such as + logs and metrics. It should be called after an operation is + completed. + + If feasible, the client should aggregate reporting data for up + to 5 seconds to reduce API traffic. Limiting aggregation to 5 + seconds is to reduce data loss during client crashes. Clients + should carefully choose the aggregation time window to avoid + data loss risk more than 0.01% for business and compliance + reasons. + + NOTE: the + [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + the size limit (wire-format byte size) of 1MB. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.ReportRequest], + Awaitable[~.ReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'report' not in self._stubs: + self._stubs['report'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v1.ServiceController/Report', + request_serializer=service_controller.ReportRequest.serialize, + response_deserializer=service_controller.ReportResponse.deserialize, + ) + return self._stubs['report'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.check: self._wrap_method( + self.check, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=5.0, + ), + default_timeout=5.0, + client_info=client_info, + ), + self.report: self._wrap_method( + self.report, + default_timeout=16.0, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'ServiceControllerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest.py new file mode 100644 index 000000000000..1a6e97151289 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.servicecontrol_v1.types import service_controller + + +from .rest_base import _BaseServiceControllerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class ServiceControllerRestInterceptor: + """Interceptor for ServiceController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceControllerRestTransport. + + .. code-block:: python + class MyCustomServiceControllerInterceptor(ServiceControllerRestInterceptor): + def pre_check(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_report(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceControllerRestTransport(interceptor=MyCustomServiceControllerInterceptor()) + client = ServiceControllerClient(transport=transport) + + + """ + def pre_check(self, request: service_controller.CheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[service_controller.CheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceController server. + """ + return request, metadata + + def post_check(self, response: service_controller.CheckResponse) -> service_controller.CheckResponse: + """Post-rpc interceptor for check + + Override in a subclass to manipulate the response + after it is returned by the ServiceController server but before + it is returned to user code. 
+ """ + return response + + def pre_report(self, request: service_controller.ReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[service_controller.ReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceController server. + """ + return request, metadata + + def post_report(self, response: service_controller.ReportResponse) -> service_controller.ReportResponse: + """Post-rpc interceptor for report + + Override in a subclass to manipulate the response + after it is returned by the ServiceController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServiceControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceControllerRestInterceptor + + +class ServiceControllerRestTransport(_BaseServiceControllerRestTransport): + """REST backend synchronous transport for ServiceController. + + `Google Service Control API `__ + + Lets clients check and report operations against a `managed + service `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ServiceControllerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServiceControllerRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Check(_BaseServiceControllerRestTransport._BaseCheck, ServiceControllerRestStub): + def __hash__(self): + return hash("ServiceControllerRestTransport.Check") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service_controller.CheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> service_controller.CheckResponse: + r"""Call the check method over HTTP. + + Args: + request (~.service_controller.CheckRequest): + The request object. Request message for the Check method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service_controller.CheckResponse: + Response message for the Check + method. + + """ + + http_options = _BaseServiceControllerRestTransport._BaseCheck._get_http_options() + request, metadata = self._interceptor.pre_check(request, metadata) + transcoded_request = _BaseServiceControllerRestTransport._BaseCheck._get_transcoded_request(http_options, request) + + body = _BaseServiceControllerRestTransport._BaseCheck._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceControllerRestTransport._BaseCheck._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceControllerRestTransport._Check._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_controller.CheckResponse() + pb_resp = service_controller.CheckResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check(resp) + return resp + + class _Report(_BaseServiceControllerRestTransport._BaseReport, ServiceControllerRestStub): + def __hash__(self): + return hash("ServiceControllerRestTransport.Report") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service_controller.ReportRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> service_controller.ReportResponse: + r"""Call the report method over HTTP. + + Args: + request (~.service_controller.ReportRequest): + The request object. Request message for the Report + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service_controller.ReportResponse: + Response message for the Report + method. + + """ + + http_options = _BaseServiceControllerRestTransport._BaseReport._get_http_options() + request, metadata = self._interceptor.pre_report(request, metadata) + transcoded_request = _BaseServiceControllerRestTransport._BaseReport._get_transcoded_request(http_options, request) + + body = _BaseServiceControllerRestTransport._BaseReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceControllerRestTransport._BaseReport._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceControllerRestTransport._Report._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_controller.ReportResponse() + pb_resp = service_controller.ReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_report(resp) + return resp + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + service_controller.CheckResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Check(self._session, self._host, self._interceptor) # type: ignore + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + service_controller.ReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Report(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ServiceControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest_base.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest_base.py new file mode 100644 index 000000000000..ca0c3d488b87 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/services/service_controller/transports/rest_base.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.servicecontrol_v1.types import service_controller + + +class _BaseServiceControllerRestTransport(ServiceControllerTransport): + """Base REST backend transport for ServiceController. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCheck: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}:check', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service_controller.CheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + ))
+ + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}:report', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service_controller.ReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseServiceControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/__init__.py new file mode 100644 index 000000000000..08ab89e0dbd5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .check_error import ( + CheckError, +) +from .distribution import ( + Distribution, +) +from .http_request import ( + HttpRequest, +) +from .log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, +) +from .metric_value import ( + MetricValue, + MetricValueSet, +) +from .operation import ( + Operation, +) +from .quota_controller import ( + AllocateQuotaRequest, + AllocateQuotaResponse, + QuotaError, + QuotaOperation, +) +from .service_controller import ( + CheckRequest, + CheckResponse, + ReportRequest, + ReportResponse, +) + +__all__ = ( + 'CheckError', + 'Distribution', + 'HttpRequest', + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'MetricValue', + 'MetricValueSet', + 'Operation', + 'AllocateQuotaRequest', + 'AllocateQuotaResponse', + 'QuotaError', + 'QuotaOperation', + 'CheckRequest', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/check_error.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/check_error.py new file mode 100644 index 000000000000..e26da30e1ac6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/check_error.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'CheckError', + }, +) + + +class CheckError(proto.Message): + r"""Defines the errors to be returned in + [google.api.servicecontrol.v1.CheckResponse.check_errors][google.api.servicecontrol.v1.CheckResponse.check_errors]. + + Attributes: + code (google.cloud.servicecontrol_v1.types.CheckError.Code): + The error code. + subject (str): + Subject to whom this error applies. See the + specific code enum for more details on this + field. For example: + + - "project:<project-id or project-number>" + - "folder:<folder-id>" + - "organization:<organization-id>". + detail (str): + Free-form text providing details on the error + cause of the error. + status (google.rpc.status_pb2.Status): + Contains public information about the check error. If + available, ``status.code`` will be non zero and client can + propagate it out as public error. + """ + class Code(proto.Enum): + r"""Error codes for Check responses. + + Values: + ERROR_CODE_UNSPECIFIED (0): + This is never used in ``CheckResponse``. + NOT_FOUND (5): + The consumer's project id, network container, or resource + container was not found. Same as + [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND]. + PERMISSION_DENIED (7): + The consumer doesn't have access to the specified resource. + Same as + [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED].
+ RESOURCE_EXHAUSTED (8): + Quota check failed. Same as + [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED]. + SERVICE_NOT_ACTIVATED (104): + The consumer hasn't activated the service. + BILLING_DISABLED (107): + The consumer cannot access the service + because billing is disabled. + PROJECT_DELETED (108): + The consumer's project has been marked as + deleted (soft deletion). + PROJECT_INVALID (114): + The consumer's project number or id does not + represent a valid project. + CONSUMER_INVALID (125): + The input consumer info does not represent a + valid consumer folder or organization. + IP_ADDRESS_BLOCKED (109): + The IP address of the consumer is invalid for + the specific consumer project. + REFERER_BLOCKED (110): + The referer address of the consumer request + is invalid for the specific consumer project. + CLIENT_APP_BLOCKED (111): + The client application of the consumer + request is invalid for the specific consumer + project. + API_TARGET_BLOCKED (122): + The API targeted by this request is invalid + for the specified consumer project. + API_KEY_INVALID (105): + The consumer's API key is invalid. + API_KEY_EXPIRED (112): + The consumer's API Key has expired. + API_KEY_NOT_FOUND (113): + The consumer's API Key was not found in + config record. + INVALID_CREDENTIAL (123): + The credential in the request can not be + verified. + NAMESPACE_LOOKUP_UNAVAILABLE (300): + The backend server for looking up project + id/number is unavailable. + SERVICE_STATUS_UNAVAILABLE (301): + The backend server for checking service + status is unavailable. + BILLING_STATUS_UNAVAILABLE (302): + The backend server for checking billing + status is unavailable. + CLOUD_RESOURCE_MANAGER_BACKEND_UNAVAILABLE (305): + Cloud Resource Manager backend server is + unavailable. 
+ """ + ERROR_CODE_UNSPECIFIED = 0 + NOT_FOUND = 5 + PERMISSION_DENIED = 7 + RESOURCE_EXHAUSTED = 8 + SERVICE_NOT_ACTIVATED = 104 + BILLING_DISABLED = 107 + PROJECT_DELETED = 108 + PROJECT_INVALID = 114 + CONSUMER_INVALID = 125 + IP_ADDRESS_BLOCKED = 109 + REFERER_BLOCKED = 110 + CLIENT_APP_BLOCKED = 111 + API_TARGET_BLOCKED = 122 + API_KEY_INVALID = 105 + API_KEY_EXPIRED = 112 + API_KEY_NOT_FOUND = 113 + INVALID_CREDENTIAL = 123 + NAMESPACE_LOOKUP_UNAVAILABLE = 300 + SERVICE_STATUS_UNAVAILABLE = 301 + BILLING_STATUS_UNAVAILABLE = 302 + CLOUD_RESOURCE_MANAGER_BACKEND_UNAVAILABLE = 305 + + code: Code = proto.Field( + proto.ENUM, + number=1, + enum=Code, + ) + subject: str = proto.Field( + proto.STRING, + number=4, + ) + detail: str = proto.Field( + proto.STRING, + number=2, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/distribution.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/distribution.py new file mode 100644 index 000000000000..da68a3c2c316 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/distribution.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import distribution_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'Distribution', + }, +) + + +class Distribution(proto.Message): + r"""Distribution represents a frequency distribution of double-valued + sample points. It contains the size of the population of sample + points plus additional optional information: + + - the arithmetic mean of the samples + - the minimum and maximum of the samples + - the sum-squared-deviation of the samples, used to compute + variance + - a histogram of the values of the sample points + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + count (int): + The total number of samples in the + distribution. Must be >= 0. + mean (float): + The arithmetic mean of the samples in the distribution. If + ``count`` is zero then this field must be zero. + minimum (float): + The minimum of the population of values. Ignored if + ``count`` is zero. + maximum (float): + The maximum of the population of values. Ignored if + ``count`` is zero. + sum_of_squared_deviation (float): + The sum of squared deviations from the mean: + Sum[i=1..count]((x_i - mean)^2) where each x_i is a sample + values. If ``count`` is zero then this field must be zero, + otherwise validation of the request fails. + bucket_counts (MutableSequence[int]): + The number of samples in each histogram bucket. + ``bucket_counts`` are optional. If present, they must sum to + the ``count`` value. + + The buckets are defined below in ``bucket_option``. There + are N buckets. 
``bucket_counts[0]`` is the number of samples + in the underflow bucket. ``bucket_counts[1]`` to + ``bucket_counts[N-1]`` are the numbers of samples in each of + the finite buckets. And + ``bucket_counts[N]`` is the number of samples in the overflow bucket. See the comments of ``bucket_option`` + below for more details. + + Any suffix of trailing zeros may be omitted. + linear_buckets (google.cloud.servicecontrol_v1.types.Distribution.LinearBuckets): + Buckets with constant width. + + This field is a member of `oneof`_ ``bucket_option``. + exponential_buckets (google.cloud.servicecontrol_v1.types.Distribution.ExponentialBuckets): + Buckets with exponentially growing width. + + This field is a member of `oneof`_ ``bucket_option``. + explicit_buckets (google.cloud.servicecontrol_v1.types.Distribution.ExplicitBuckets): + Buckets with arbitrary user-provided width. + + This field is a member of `oneof`_ ``bucket_option``. + exemplars (MutableSequence[google.api.distribution_pb2.Exemplar]): + Example points. Must be in increasing order of ``value`` + field. + """ + + class LinearBuckets(proto.Message): + r"""Describing buckets with constant width. + + Attributes: + num_finite_buckets (int): + The number of finite buckets. With the underflow and + overflow buckets, the total number of buckets is + ``num_finite_buckets`` + 2. See comments on + ``bucket_options`` for details. + width (float): + The i'th linear bucket covers the interval [offset + (i-1) + \* width, offset + i \* width) where i ranges from 1 to + num_finite_buckets, inclusive. Must be strictly positive. + offset (float): + The i'th linear bucket covers the interval [offset + (i-1) + \* width, offset + i \* width) where i ranges from 1 to + num_finite_buckets, inclusive.
+ """ + + num_finite_buckets: int = proto.Field( + proto.INT32, + number=1, + ) + width: float = proto.Field( + proto.DOUBLE, + number=2, + ) + offset: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class ExponentialBuckets(proto.Message): + r"""Describing buckets with exponentially growing width. + + Attributes: + num_finite_buckets (int): + The number of finite buckets. With the underflow and + overflow buckets, the total number of buckets is + ``num_finite_buckets`` + 2. See comments on + ``bucket_options`` for details. + growth_factor (float): + The i'th exponential bucket covers the interval [scale \* + growth_factor^(i-1), scale \* growth_factor^i) where i + ranges from 1 to num_finite_buckets inclusive. Must be + larger than 1.0. + scale (float): + The i'th exponential bucket covers the interval [scale \* + growth_factor^(i-1), scale \* growth_factor^i) where i + ranges from 1 to num_finite_buckets inclusive. Must be > 0. + """ + + num_finite_buckets: int = proto.Field( + proto.INT32, + number=1, + ) + growth_factor: float = proto.Field( + proto.DOUBLE, + number=2, + ) + scale: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class ExplicitBuckets(proto.Message): + r"""Describing buckets with arbitrary user-provided width. + + Attributes: + bounds (MutableSequence[float]): + 'bound' is a list of strictly increasing boundaries between + buckets. Note that a list of length N-1 defines N buckets + because of fenceposting. See comments on ``bucket_options`` + for details. + + The i'th finite bucket covers the interval [bound[i-1], + bound[i]) where i ranges from 1 to bound_size() - 1. Note + that there are no finite buckets at all if 'bound' only + contains a single element; in that special case the single + bound defines the boundary between the underflow and + overflow buckets. 
+ + bucket number lower bound upper bound i == 0 (underflow) + -inf bound[i] 0 < i < bound_size() bound[i-1] bound[i] i == + bound_size() (overflow) bound[i-1] +inf + """ + + bounds: MutableSequence[float] = proto.RepeatedField( + proto.DOUBLE, + number=1, + ) + + count: int = proto.Field( + proto.INT64, + number=1, + ) + mean: float = proto.Field( + proto.DOUBLE, + number=2, + ) + minimum: float = proto.Field( + proto.DOUBLE, + number=3, + ) + maximum: float = proto.Field( + proto.DOUBLE, + number=4, + ) + sum_of_squared_deviation: float = proto.Field( + proto.DOUBLE, + number=5, + ) + bucket_counts: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=6, + ) + linear_buckets: LinearBuckets = proto.Field( + proto.MESSAGE, + number=7, + oneof='bucket_option', + message=LinearBuckets, + ) + exponential_buckets: ExponentialBuckets = proto.Field( + proto.MESSAGE, + number=8, + oneof='bucket_option', + message=ExponentialBuckets, + ) + explicit_buckets: ExplicitBuckets = proto.Field( + proto.MESSAGE, + number=9, + oneof='bucket_option', + message=ExplicitBuckets, + ) + exemplars: MutableSequence[distribution_pb2.Distribution.Exemplar] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=distribution_pb2.Distribution.Exemplar, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/http_request.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/http_request.py new file mode 100644 index 000000000000..05dd929861ca --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/http_request.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'HttpRequest', + }, +) + + +class HttpRequest(proto.Message): + r"""A common proto for logging HTTP requests. Only contains + semantics defined by the HTTP specification. Product-specific + logging information MUST be defined in a separate message. + + Attributes: + request_method (str): + The request method. Examples: ``"GET"``, ``"HEAD"``, + ``"PUT"``, ``"POST"``. + request_url (str): + The scheme (http, https), the host name, the path, and the + query portion of the URL that was requested. Example: + ``"http://example.com/some/info?color=red"``. + request_size (int): + The size of the HTTP request message in + bytes, including the request headers and the + request body. + status (int): + The response code indicating the status of + the response. Examples: 200, 404. + response_size (int): + The size of the HTTP response message sent + back to the client, in bytes, including the + response headers and the response body. + user_agent (str): + The user agent sent by the client. Example: + ``"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)"``. + remote_ip (str): + The IP address (IPv4 or IPv6) of the client that issued the + HTTP request. Examples: ``"192.168.1.1"``, + ``"FE80::0202:B3FF:FE1E:8329"``. 
+ server_ip (str): + The IP address (IPv4 or IPv6) of the origin + server that the request was sent to. + referer (str): + The referer URL of the request, as defined in `HTTP/1.1 + Header Field + Definitions <https://datatracker.ietf.org/doc/html/rfc2616#section-14.36>`__. + latency (google.protobuf.duration_pb2.Duration): + The request processing latency on the server, + from the time the request was received until the + response was sent. + cache_lookup (bool): + Whether or not a cache lookup was attempted. + cache_hit (bool): + Whether or not an entity was served from + cache (with or without validation). + cache_validated_with_origin_server (bool): + Whether or not the response was validated with the origin + server before being served from cache. This field is only + meaningful if ``cache_hit`` is True. + cache_fill_bytes (int): + The number of HTTP response bytes inserted + into cache. Set only when a cache fill was + attempted. + protocol (str): + Protocol used for the request. Examples: + "HTTP/1.1", "HTTP/2", "websocket". + """ + + request_method: str = proto.Field( + proto.STRING, + number=1, + ) + request_url: str = proto.Field( + proto.STRING, + number=2, + ) + request_size: int = proto.Field( + proto.INT64, + number=3, + ) + status: int = proto.Field( + proto.INT32, + number=4, + ) + response_size: int = proto.Field( + proto.INT64, + number=5, + ) + user_agent: str = proto.Field( + proto.STRING, + number=6, + ) + remote_ip: str = proto.Field( + proto.STRING, + number=7, + ) + server_ip: str = proto.Field( + proto.STRING, + number=13, + ) + referer: str = proto.Field( + proto.STRING, + number=8, + ) + latency: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=14, + message=duration_pb2.Duration, + ) + cache_lookup: bool = proto.Field( + proto.BOOL, + number=11, + ) + cache_hit: bool = proto.Field( + proto.BOOL, + number=9, + ) + cache_validated_with_origin_server: bool = proto.Field( + proto.BOOL, + number=10, + ) + cache_fill_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + protocol: str
= proto.Field( + proto.STRING, + number=15, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/log_entry.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/log_entry.py new file mode 100644 index 000000000000..cdd0cb340a29 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/log_entry.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.servicecontrol_v1.types import http_request as gas_http_request +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + }, +) + + +class LogEntry(proto.Message): + r"""An individual log entry. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The log to which this log entry belongs. Examples: + ``"syslog"``, ``"book_log"``. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + The time the event described by the log entry + occurred. If omitted, defaults to operation + start time. + severity (google.logging.type.log_severity_pb2.LogSeverity): + The severity of the log entry. The default value is + ``LogSeverity.DEFAULT``. + http_request (google.cloud.servicecontrol_v1.types.HttpRequest): + Optional. Information about the HTTP request + associated with this log entry, if applicable. + trace (str): + Optional. Resource name of the trace associated with the log + entry, if any. If this field contains a relative resource + name, you can assume the name is relative to + ``//tracing.googleapis.com``. Example: + ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + insert_id (str): + A unique ID for the log entry used for deduplication. If + omitted, the implementation will generate one based on + operation_id. + labels (MutableMapping[str, str]): + A set of user-defined (key, value) data that + provides additional information about the log + entry. + proto_payload (google.protobuf.any_pb2.Any): + The log entry payload, represented as a protocol buffer that + is expressed as a JSON object. The only accepted type + currently is [AuditLog][google.cloud.audit.AuditLog]. + + This field is a member of `oneof`_ ``payload``. + text_payload (str): + The log entry payload, represented as a + Unicode string (UTF-8). + + This field is a member of `oneof`_ ``payload``. + struct_payload (google.protobuf.struct_pb2.Struct): + The log entry payload, represented as a + structure that is expressed as a JSON object. + + This field is a member of `oneof`_ ``payload``. + operation (google.cloud.servicecontrol_v1.types.LogEntryOperation): + Optional. 
Information about an operation + associated with the log entry, if applicable. + source_location (google.cloud.servicecontrol_v1.types.LogEntrySourceLocation): + Optional. Source code location information + associated with the log entry, if any. + """ + + name: str = proto.Field( + proto.STRING, + number=10, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + severity: log_severity_pb2.LogSeverity = proto.Field( + proto.ENUM, + number=12, + enum=log_severity_pb2.LogSeverity, + ) + http_request: gas_http_request.HttpRequest = proto.Field( + proto.MESSAGE, + number=14, + message=gas_http_request.HttpRequest, + ) + trace: str = proto.Field( + proto.STRING, + number=15, + ) + insert_id: str = proto.Field( + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + proto_payload: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=2, + oneof='payload', + message=any_pb2.Any, + ) + text_payload: str = proto.Field( + proto.STRING, + number=3, + oneof='payload', + ) + struct_payload: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + oneof='payload', + message=struct_pb2.Struct, + ) + operation: 'LogEntryOperation' = proto.Field( + proto.MESSAGE, + number=16, + message='LogEntryOperation', + ) + source_location: 'LogEntrySourceLocation' = proto.Field( + proto.MESSAGE, + number=17, + message='LogEntrySourceLocation', + ) + + +class LogEntryOperation(proto.Message): + r"""Additional information about a potentially long-running + operation with which a log entry is associated. + + Attributes: + id (str): + Optional. An arbitrary operation identifier. + Log entries with the same identifier are assumed + to be part of the same operation. + producer (str): + Optional. An arbitrary producer identifier. The combination + of ``id`` and ``producer`` must be globally unique. 
Examples + for ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first (bool): + Optional. Set this to True if this is the + first log entry in the operation. + last (bool): + Optional. Set this to True if this is the + last log entry in the operation. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + producer: str = proto.Field( + proto.STRING, + number=2, + ) + first: bool = proto.Field( + proto.BOOL, + number=3, + ) + last: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class LogEntrySourceLocation(proto.Message): + r"""Additional information about the source code location that + produced the log entry. + + Attributes: + file (str): + Optional. Source file name. Depending on the + runtime environment, this might be a simple name + or a fully-qualified name. + line (int): + Optional. Line within the source file. + 1-based; 0 indicates no line number available. + function (str): + Optional. Human-readable name of the function or method + being invoked, with optional context such as the class or + package name. This information may be used in contexts such + as the logs viewer, where a file and line number are less + meaningful. The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). 
+ """ + + file: str = proto.Field( + proto.STRING, + number=1, + ) + line: int = proto.Field( + proto.INT64, + number=2, + ) + function: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/metric_value.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/metric_value.py new file mode 100644 index 000000000000..f7e89c80f019 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/metric_value.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.servicecontrol_v1.types import distribution as gas_distribution +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'MetricValue', + 'MetricValueSet', + }, +) + + +class MetricValue(proto.Message): + r"""Represents a single metric value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + labels (MutableMapping[str, str]): + The labels describing the metric value. See comments on + [google.api.servicecontrol.v1.Operation.labels][google.api.servicecontrol.v1.Operation.labels] + for the overriding relationship. Note that this map must not + contain monitored resource labels. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start of the time period over which this metric value's + measurement applies. The time period has different semantics + for different metric types (cumulative, delta, and gauge). + See the metric definition documentation in the service + configuration for details. If not specified, + [google.api.servicecontrol.v1.Operation.start_time][google.api.servicecontrol.v1.Operation.start_time] + will be used. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end of the time period over which this metric value's + measurement applies. If not specified, + [google.api.servicecontrol.v1.Operation.end_time][google.api.servicecontrol.v1.Operation.end_time] + will be used. + bool_value (bool): + A boolean value. + + This field is a member of `oneof`_ ``value``. + int64_value (int): + A signed 64-bit integer value. + + This field is a member of `oneof`_ ``value``. + double_value (float): + A double precision floating point value. + + This field is a member of `oneof`_ ``value``. + string_value (str): + A text string value. + + This field is a member of `oneof`_ ``value``. + distribution_value (google.cloud.servicecontrol_v1.types.Distribution): + A distribution value. + + This field is a member of `oneof`_ ``value``. 
+ """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='value', + ) + int64_value: int = proto.Field( + proto.INT64, + number=5, + oneof='value', + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=6, + oneof='value', + ) + string_value: str = proto.Field( + proto.STRING, + number=7, + oneof='value', + ) + distribution_value: gas_distribution.Distribution = proto.Field( + proto.MESSAGE, + number=8, + oneof='value', + message=gas_distribution.Distribution, + ) + + +class MetricValueSet(proto.Message): + r"""Represents a set of metric values in the same metric. + Each metric value in the set should have a unique combination of + start time, end time, and label values. + + Attributes: + metric_name (str): + The metric name defined in the service + configuration. + metric_values (MutableSequence[google.cloud.servicecontrol_v1.types.MetricValue]): + The values in this metric. 
+ """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + metric_values: MutableSequence['MetricValue'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MetricValue', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/operation.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/operation.py new file mode 100644 index 000000000000..6900116f3039 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/operation.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.servicecontrol_v1.types import log_entry +from google.cloud.servicecontrol_v1.types import metric_value +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'Operation', + }, +) + + +class Operation(proto.Message): + r"""Represents information regarding an operation. + + Attributes: + operation_id (str): + Identity of the operation. This must be + unique within the scope of the service that + generated the operation. 
If the service calls + Check() and Report() on the same operation, the + two calls should carry the same id. + + UUID version 4 is recommended, though not + required. In scenarios where an operation is + computed from existing information and an + idempotent id is desirable for deduplication + purpose, UUID version 5 is recommended. See RFC + 4122 for details. + operation_name (str): + Fully qualified name of the operation. + Reserved for future use. + consumer_id (str): + Identity of the consumer who is using the service. This + field should be filled in for the operations initiated by a + consumer, but not for service-initiated operations that are + not related to a specific consumer. + + - This can be in one of the following formats: + + - project:PROJECT_ID, + - project\ ``_``\ number:PROJECT_NUMBER, + - projects/PROJECT_ID or PROJECT_NUMBER, + - folders/FOLDER_NUMBER, + - organizations/ORGANIZATION_NUMBER, + - api\ ``_``\ key:API_KEY. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Start time of the operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of the operation. Required when the operation is + used in + [ServiceController.Report][google.api.servicecontrol.v1.ServiceController.Report], + but optional when the operation is used in + [ServiceController.Check][google.api.servicecontrol.v1.ServiceController.Check]. + labels (MutableMapping[str, str]): + Labels describing the operation. Only the following labels + are allowed: + + - Labels describing monitored resources as defined in the + service configuration. + - Default labels of metric values. When specified, labels + defined in the metric value override these default. 
+ - The following labels defined by Google Cloud Platform: + + - ``cloud.googleapis.com/location`` describing the + location where the operation happened, + - ``servicecontrol.googleapis.com/user_agent`` + describing the user agent of the API request, + - ``servicecontrol.googleapis.com/service_agent`` + describing the service used to handle the API request + (e.g. ESP), + - ``servicecontrol.googleapis.com/platform`` describing + the platform where the API is served, such as App + Engine, Compute Engine, or Kubernetes Engine. + metric_value_sets (MutableSequence[google.cloud.servicecontrol_v1.types.MetricValueSet]): + Represents information about this operation. + Each MetricValueSet corresponds to a metric + defined in the service configuration. The data + type used in the MetricValueSet must agree with + the data type specified in the metric + definition. + + Within a single operation, it is not allowed to + have more than one MetricValue instances that + have the same metric names and identical label + value combinations. If a request has such + duplicated MetricValue instances, the entire + request is rejected with an invalid argument + error. + log_entries (MutableSequence[google.cloud.servicecontrol_v1.types.LogEntry]): + Represents information to be logged. + importance (google.cloud.servicecontrol_v1.types.Operation.Importance): + DO NOT USE. This is an experimental field. + extensions (MutableSequence[google.protobuf.any_pb2.Any]): + Unimplemented. + """ + class Importance(proto.Enum): + r"""Defines the importance of the data contained in the + operation. + + Values: + LOW (0): + Allows data caching, batching, and + aggregation. It provides higher performance with + higher data loss risk. + HIGH (1): + Disables data aggregation to minimize data + loss. It is for operations that contains + significant monetary value or audit trail. This + feature only applies to the client libraries. 
+ """ + LOW = 0 + HIGH = 1 + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + operation_name: str = proto.Field( + proto.STRING, + number=2, + ) + consumer_id: str = proto.Field( + proto.STRING, + number=3, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + metric_value_sets: MutableSequence[metric_value.MetricValueSet] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=metric_value.MetricValueSet, + ) + log_entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=log_entry.LogEntry, + ) + importance: Importance = proto.Field( + proto.ENUM, + number=11, + enum=Importance, + ) + extensions: MutableSequence[any_pb2.Any] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message=any_pb2.Any, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/quota_controller.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/quota_controller.py new file mode 100644 index 000000000000..7671d9ee767c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/quota_controller.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.servicecontrol_v1.types import metric_value +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'AllocateQuotaRequest', + 'QuotaOperation', + 'AllocateQuotaResponse', + 'QuotaError', + }, +) + + +class AllocateQuotaRequest(proto.Message): + r"""Request message for the AllocateQuota method. + + Attributes: + service_name (str): + Name of the service as specified in the service + configuration. For example, ``"pubsub.googleapis.com"``. + + See [google.api.Service][google.api.Service] for the + definition of a service name. + allocate_operation (google.cloud.servicecontrol_v1.types.QuotaOperation): + Operation that describes the quota + allocation. + service_config_id (str): + Specifies which version of service + configuration should be used to process the + request. If unspecified or no matching version + can be found, the latest one will be used. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + allocate_operation: 'QuotaOperation' = proto.Field( + proto.MESSAGE, + number=2, + message='QuotaOperation', + ) + service_config_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class QuotaOperation(proto.Message): + r"""Represents information regarding a quota operation. + + Attributes: + operation_id (str): + Identity of the operation. 
This is expected to be unique + within the scope of the service that generated the + operation, and guarantees idempotency in case of retries. + + In order to ensure best performance and latency in the Quota + backends, operation_ids are optimally associated with time, + so that related operations can be accessed fast in storage. + For this reason, the recommended token for services that + intend to operate at a high QPS is Unix time in nanos + UUID + method_name (str): + Fully qualified name of the API method for which this quota + operation is requested. This name is used for matching quota + rules or metric rules and billing status rules defined in + service configuration. + + This field should not be set if any of the following is + true: (1) the quota operation is performed on non-API + resources. (2) quota_metrics is set because the caller is + doing quota override. + + Example of an RPC method name: + google.example.library.v1.LibraryService.CreateShelf + consumer_id (str): + Identity of the consumer for whom this quota operation is + being performed. + + This can be in one of the following formats: + project:, project_number:, + api_key:. + labels (MutableMapping[str, str]): + Labels describing the operation. + quota_metrics (MutableSequence[google.cloud.servicecontrol_v1.types.MetricValueSet]): + Represents information about this operation. Each + MetricValueSet corresponds to a metric defined in the + service configuration. The data type used in the + MetricValueSet must agree with the data type specified in + the metric definition. + + Within a single operation, it is not allowed to have more + than one MetricValue instances that have the same metric + names and identical label value combinations. If a request + has such duplicated MetricValue instances, the entire + request is rejected with an invalid argument error. + + This field is mutually exclusive with method_name. 
+ quota_mode (google.cloud.servicecontrol_v1.types.QuotaOperation.QuotaMode): + Quota mode for this operation. + """ + class QuotaMode(proto.Enum): + r"""Supported quota modes. + + Values: + UNSPECIFIED (0): + Guard against implicit default. Must not be + used. + NORMAL (1): + For AllocateQuota request, allocates quota + for the amount specified in the service + configuration or specified using the quota + metrics. If the amount is higher than the + available quota, allocation error will be + returned and no quota will be allocated. + If multiple quotas are part of the request, and + one fails, none of the quotas are allocated or + released. + BEST_EFFORT (2): + The operation allocates quota for the amount specified in + the service configuration or specified using the quota + metrics. If the amount is higher than the available quota, + request does not fail but all available quota will be + allocated. For rate quota, BEST_EFFORT will continue to + deduct from other groups even if one does not have enough + quota. For allocation, it will find the minimum available + amount across all groups and deduct that amount from all the + affected groups. + CHECK_ONLY (3): + For AllocateQuota request, only checks if + there is enough quota available and does not + change the available quota. No lock is placed on + the available quota either. + QUERY_ONLY (4): + Unimplemented. When used in + AllocateQuotaRequest, this returns the effective + quota limit(s) in the response, and no quota + check will be performed. Not supported for other + requests, and even for AllocateQuotaRequest, + this is currently supported only for allowlisted + services. + ADJUST_ONLY (5): + The operation allocates quota for the amount + specified in the service configuration or + specified using the quota metrics. If the + requested amount is higher than the available + quota, request does not fail and remaining quota + would become negative (going over the limit). + Not supported for Rate Quota. 
+ """ + UNSPECIFIED = 0 + NORMAL = 1 + BEST_EFFORT = 2 + CHECK_ONLY = 3 + QUERY_ONLY = 4 + ADJUST_ONLY = 5 + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + method_name: str = proto.Field( + proto.STRING, + number=2, + ) + consumer_id: str = proto.Field( + proto.STRING, + number=3, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + quota_metrics: MutableSequence[metric_value.MetricValueSet] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=metric_value.MetricValueSet, + ) + quota_mode: QuotaMode = proto.Field( + proto.ENUM, + number=6, + enum=QuotaMode, + ) + + +class AllocateQuotaResponse(proto.Message): + r"""Response message for the AllocateQuota method. + + Attributes: + operation_id (str): + The same operation_id value used in the + AllocateQuotaRequest. Used for logging and diagnostics + purposes. + allocate_errors (MutableSequence[google.cloud.servicecontrol_v1.types.QuotaError]): + Indicates the decision of the allocate. + quota_metrics (MutableSequence[google.cloud.servicecontrol_v1.types.MetricValueSet]): + Quota metrics to indicate the result of allocation. + Depending on the request, one or more of the following + metrics will be included: + + 1. Per quota group or per quota metric incremental usage + will be specified using the following delta metric : + "serviceruntime.googleapis.com/api/consumer/quota_used_count" + + 2. The quota limit reached condition will be specified using + the following boolean metric : + "serviceruntime.googleapis.com/quota/exceeded". + service_config_id (str): + ID of the actual config used to process the + request. 
+ """ + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + allocate_errors: MutableSequence['QuotaError'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='QuotaError', + ) + quota_metrics: MutableSequence[metric_value.MetricValueSet] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=metric_value.MetricValueSet, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class QuotaError(proto.Message): + r"""Represents error information for + [QuotaOperation][google.api.servicecontrol.v1.QuotaOperation]. + + Attributes: + code (google.cloud.servicecontrol_v1.types.QuotaError.Code): + Error code. + subject (str): + Subject to whom this error applies. See the + specific enum for more details on this field. + For example, "clientip:" + or "project:". + description (str): + Free-form text that provides details on the + cause of the error. + status (google.rpc.status_pb2.Status): + Contains additional information about the quota error. If + available, ``status.code`` will be non zero. + """ + class Code(proto.Enum): + r"""Error codes related to project config validations are deprecated + since the quota controller methods do not perform these validations. + Instead services have to call the Check method, without + quota_properties field, to perform these validations before calling + the quota controller methods. These methods check only for project + deletion to be wipe out compliant. + + Values: + UNSPECIFIED (0): + This is never used. + RESOURCE_EXHAUSTED (8): + Quota allocation failed. Same as + [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED]. + BILLING_NOT_ACTIVE (107): + Consumer cannot access the service because + the service requires active billing. + PROJECT_DELETED (108): + Consumer's project has been marked as deleted + (soft deletion). + API_KEY_INVALID (105): + Specified API key is invalid. + API_KEY_EXPIRED (112): + Specified API Key has expired. 
+ """ + UNSPECIFIED = 0 + RESOURCE_EXHAUSTED = 8 + BILLING_NOT_ACTIVE = 107 + PROJECT_DELETED = 108 + API_KEY_INVALID = 105 + API_KEY_EXPIRED = 112 + + code: Code = proto.Field( + proto.ENUM, + number=1, + enum=Code, + ) + subject: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/service_controller.py b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/service_controller.py new file mode 100644 index 000000000000..b2d7e23d1b0a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/google/cloud/servicecontrol_v1/types/service_controller.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.servicecontrol_v1.types import check_error +from google.cloud.servicecontrol_v1.types import operation as gas_operation +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v1', + manifest={ + 'CheckRequest', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', + }, +) + + +class CheckRequest(proto.Message): + r"""Request message for the Check method. + + Attributes: + service_name (str): + The service name as specified in its service configuration. + For example, ``"pubsub.googleapis.com"``. + + See + `google.api.Service `__ + for the definition of a service name. + operation (google.cloud.servicecontrol_v1.types.Operation): + The operation to be checked. + service_config_id (str): + Specifies which version of service + configuration should be used to process the + request. + + If unspecified or no matching version can be + found, the latest one will be used. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + operation: gas_operation.Operation = proto.Field( + proto.MESSAGE, + number=2, + message=gas_operation.Operation, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CheckResponse(proto.Message): + r"""Response message for the Check method. + + Attributes: + operation_id (str): + The same operation_id value used in the + [CheckRequest][google.api.servicecontrol.v1.CheckRequest]. + Used for logging and diagnostics purposes. + check_errors (MutableSequence[google.cloud.servicecontrol_v1.types.CheckError]): + Indicate the decision of the check. + + If no check errors are present, the service + should process the operation. Otherwise the + service should use the list of errors to + determine the appropriate action. 
+ service_config_id (str): + The actual config id used to process the + request. + service_rollout_id (str): + The current service rollout id used to + process the request. + check_info (google.cloud.servicecontrol_v1.types.CheckResponse.CheckInfo): + Feedback data returned from the server during + processing a Check request. + """ + + class CheckInfo(proto.Message): + r"""Contains additional information about the check operation. + + Attributes: + unused_arguments (MutableSequence[str]): + A list of fields and label keys that are + ignored by the server. The client doesn't need + to send them for following requests to improve + performance and allow better aggregation. + consumer_info (google.cloud.servicecontrol_v1.types.CheckResponse.ConsumerInfo): + Consumer info of this check. + api_key_uid (str): + The unique id of the api key in the format of + "apikey:". This field will be populated + when the consumer passed to Service Control is + an API key and all the API key related + validations are successful. + """ + + unused_arguments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + consumer_info: 'CheckResponse.ConsumerInfo' = proto.Field( + proto.MESSAGE, + number=2, + message='CheckResponse.ConsumerInfo', + ) + api_key_uid: str = proto.Field( + proto.STRING, + number=5, + ) + + class ConsumerInfo(proto.Message): + r"""``ConsumerInfo`` provides information about the consumer. + + Attributes: + project_number (int): + The Google cloud project number, e.g. + 1234567890. A value of 0 indicates no project + number is found. + + NOTE: This field is deprecated after we support + flexible consumer id. New code should not depend + on this field anymore. + type_ (google.cloud.servicecontrol_v1.types.CheckResponse.ConsumerInfo.ConsumerType): + The type of the consumer which should have been defined in + `Google Resource + Manager `__. 
+ consumer_number (int): + The consumer identity number, can be Google + cloud project number, folder number or + organization number e.g. 1234567890. A value of + 0 indicates no consumer number is found. + """ + class ConsumerType(proto.Enum): + r"""The type of the consumer as defined in `Google Resource + Manager `__. + + Values: + CONSUMER_TYPE_UNSPECIFIED (0): + This is never used. + PROJECT (1): + The consumer is a Google Cloud Project. + FOLDER (2): + The consumer is a Google Cloud Folder. + ORGANIZATION (3): + The consumer is a Google Cloud Organization. + SERVICE_SPECIFIC (4): + Service-specific resource container which is + defined by the service producer to offer their + users the ability to manage service control + functionalities at a finer level of granularity + than the PROJECT. + """ + CONSUMER_TYPE_UNSPECIFIED = 0 + PROJECT = 1 + FOLDER = 2 + ORGANIZATION = 3 + SERVICE_SPECIFIC = 4 + + project_number: int = proto.Field( + proto.INT64, + number=1, + ) + type_: 'CheckResponse.ConsumerInfo.ConsumerType' = proto.Field( + proto.ENUM, + number=2, + enum='CheckResponse.ConsumerInfo.ConsumerType', + ) + consumer_number: int = proto.Field( + proto.INT64, + number=3, + ) + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + check_errors: MutableSequence[check_error.CheckError] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=check_error.CheckError, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=5, + ) + service_rollout_id: str = proto.Field( + proto.STRING, + number=11, + ) + check_info: CheckInfo = proto.Field( + proto.MESSAGE, + number=6, + message=CheckInfo, + ) + + +class ReportRequest(proto.Message): + r"""Request message for the Report method. + + Attributes: + service_name (str): + The service name as specified in its service configuration. + For example, ``"pubsub.googleapis.com"``. + + See + `google.api.Service `__ + for the definition of a service name. 
+ operations (MutableSequence[google.cloud.servicecontrol_v1.types.Operation]): + Operations to be reported. + + Typically the service should report one operation per + request. Putting multiple operations into a single request + is allowed, but should be used only when multiple operations + are natually available at the time of the report. + + There is no limit on the number of operations in the same + ReportRequest, however the ReportRequest size should be no + larger than 1MB. See + [ReportResponse.report_errors][google.api.servicecontrol.v1.ReportResponse.report_errors] + for partial failure behavior. + service_config_id (str): + Specifies which version of service config + should be used to process the request. + + If unspecified or no matching version can be + found, the latest one will be used. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + operations: MutableSequence[gas_operation.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gas_operation.Operation, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ReportResponse(proto.Message): + r"""Response message for the Report method. + + Attributes: + report_errors (MutableSequence[google.cloud.servicecontrol_v1.types.ReportResponse.ReportError]): + Partial failures, one for each ``Operation`` in the request + that failed processing. There are three possible + combinations of the RPC status: + + 1. The combination of a successful RPC status and an empty + ``report_errors`` list indicates a complete success where + all ``Operations`` in the request are processed + successfully. + 2. The combination of a successful RPC status and a + non-empty ``report_errors`` list indicates a partial + success where some ``Operations`` in the request + succeeded. Each ``Operation`` that failed processing has + a corresponding item in this list. + 3. A failed RPC status indicates a general non-deterministic + failure. 
When this happens, it's impossible to know which + of the 'Operations' in the request succeeded or failed. + service_config_id (str): + The actual config id used to process the + request. + service_rollout_id (str): + The current service rollout id used to + process the request. + """ + + class ReportError(proto.Message): + r"""Represents the processing error of one + [Operation][google.api.servicecontrol.v1.Operation] in the request. + + Attributes: + operation_id (str): + The + [Operation.operation_id][google.api.servicecontrol.v1.Operation.operation_id] + value from the request. + status (google.rpc.status_pb2.Status): + Details of the error when processing the + [Operation][google.api.servicecontrol.v1.Operation]. + """ + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + report_errors: MutableSequence[ReportError] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ReportError, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + service_rollout_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v1/mypy.ini b/owl-bot-staging/google-cloud-service-control/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-service-control/v1/noxfile.py b/owl-bot-staging/google-cloud-service-control/v1/noxfile.py new file mode 100644 index 000000000000..36b14906de0b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-service-control' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/servicecontrol_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/servicecontrol_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_async.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_async.py new file mode 100644 index 000000000000..604df09fe604 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AllocateQuota +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_QuotaController_AllocateQuota_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +async def sample_allocate_quota(): + # Create a client + client = servicecontrol_v1.QuotaControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.AllocateQuotaRequest( + ) + + # Make the request + response = await client.allocate_quota(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_QuotaController_AllocateQuota_async] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py new file mode 100644 index 000000000000..ef0c76a199d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AllocateQuota +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_QuotaController_AllocateQuota_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +def sample_allocate_quota(): + # Create a client + client = servicecontrol_v1.QuotaControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.AllocateQuotaRequest( + ) + + # Make the request + response = client.allocate_quota(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_QuotaController_AllocateQuota_sync] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py new file mode 100644 index 000000000000..718920009c19 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Check +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_ServiceController_Check_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +async def sample_check(): + # Create a client + client = servicecontrol_v1.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.CheckRequest( + ) + + # Make the request + response = await client.check(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_ServiceController_Check_async] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_sync.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_sync.py new file mode 100644 index 000000000000..63f4a4a3e36c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_check_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Check +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_ServiceController_Check_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +def sample_check(): + # Create a client + client = servicecontrol_v1.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.CheckRequest( + ) + + # Make the request + response = client.check(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_ServiceController_Check_sync] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_async.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_async.py new file mode 100644 index 000000000000..ae6a11e385f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Report +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_ServiceController_Report_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +async def sample_report(): + # Create a client + client = servicecontrol_v1.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v1.ReportRequest( + ) + + # Make the request + response = await client.report(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_ServiceController_Report_async] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_sync.py b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_sync.py new file mode 100644 index 000000000000..a4b4eea99af4 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/servicecontrol_v1_generated_service_controller_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Report +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v1_generated_ServiceController_Report_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v1 + + +def sample_report(): + # Create a client + client = servicecontrol_v1.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v1.ReportRequest( + ) + + # Make the request + response = client.report(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v1_generated_ServiceController_Report_sync] diff --git a/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json new file mode 100644 index 000000000000..ae2dfb3a75ea --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v1.json @@ -0,0 +1,474 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.servicecontrol.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-service-control", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicecontrol_v1.QuotaControllerAsyncClient", + "shortName": "QuotaControllerAsyncClient" + }, + "fullName": "google.cloud.servicecontrol_v1.QuotaControllerAsyncClient.allocate_quota", + "method": { + "fullName": "google.api.servicecontrol.v1.QuotaController.AllocateQuota", + "service": { + "fullName": "google.api.servicecontrol.v1.QuotaController", + "shortName": "QuotaController" + }, + "shortName": "AllocateQuota" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.AllocateQuotaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.AllocateQuotaResponse", + "shortName": "allocate_quota" + }, + "description": "Sample for AllocateQuota", + "file": "servicecontrol_v1_generated_quota_controller_allocate_quota_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_QuotaController_AllocateQuota_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_quota_controller_allocate_quota_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicecontrol_v1.QuotaControllerClient", + "shortName": "QuotaControllerClient" + }, + "fullName": "google.cloud.servicecontrol_v1.QuotaControllerClient.allocate_quota", + "method": { + "fullName": "google.api.servicecontrol.v1.QuotaController.AllocateQuota", + "service": { + "fullName": "google.api.servicecontrol.v1.QuotaController", + "shortName": "QuotaController" + }, + "shortName": "AllocateQuota" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.AllocateQuotaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.AllocateQuotaResponse", + "shortName": "allocate_quota" + }, + "description": "Sample for AllocateQuota", + "file": 
"servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_QuotaController_AllocateQuota_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_quota_controller_allocate_quota_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerAsyncClient", + "shortName": "ServiceControllerAsyncClient" + }, + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerAsyncClient.check", + "method": { + "fullName": "google.api.servicecontrol.v1.ServiceController.Check", + "service": { + "fullName": "google.api.servicecontrol.v1.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Check" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.CheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.CheckResponse", + "shortName": "check" + }, + "description": "Sample for Check", + "file": "servicecontrol_v1_generated_service_controller_check_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_ServiceController_Check_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_service_controller_check_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerClient", + "shortName": "ServiceControllerClient" + }, + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerClient.check", + "method": { + "fullName": "google.api.servicecontrol.v1.ServiceController.Check", + "service": { + "fullName": "google.api.servicecontrol.v1.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Check" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.CheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.CheckResponse", + "shortName": "check" + }, + "description": "Sample for Check", + "file": "servicecontrol_v1_generated_service_controller_check_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_ServiceController_Check_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_service_controller_check_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerAsyncClient", + "shortName": "ServiceControllerAsyncClient" + }, + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerAsyncClient.report", + "method": { + "fullName": "google.api.servicecontrol.v1.ServiceController.Report", + "service": { + "fullName": "google.api.servicecontrol.v1.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Report" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.ReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.ReportResponse", + "shortName": "report" + }, + "description": "Sample for Report", + "file": "servicecontrol_v1_generated_service_controller_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_ServiceController_Report_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_service_controller_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerClient", + "shortName": "ServiceControllerClient" + }, + "fullName": "google.cloud.servicecontrol_v1.ServiceControllerClient.report", + "method": { + "fullName": "google.api.servicecontrol.v1.ServiceController.Report", + "service": { + "fullName": 
"google.api.servicecontrol.v1.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Report" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v1.types.ReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v1.types.ReportResponse", + "shortName": "report" + }, + "description": "Sample for Report", + "file": "servicecontrol_v1_generated_service_controller_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v1_generated_ServiceController_Report_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v1_generated_service_controller_report_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-service-control/v1/scripts/fixup_servicecontrol_v1_keywords.py b/owl-bot-staging/google-cloud-service-control/v1/scripts/fixup_servicecontrol_v1_keywords.py new file mode 100644 index 000000000000..88ed140322e9 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/scripts/fixup_servicecontrol_v1_keywords.py @@ -0,0 +1,178 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class servicecontrolCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'allocate_quota': ('service_name', 'allocate_operation', 'service_config_id', ), + 'check': ('service_name', 'operation', 'service_config_id', ), + 'report': ('service_name', 'operations', 'service_config_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=servicecontrolCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the servicecontrol client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-service-control/v1/setup.py b/owl-bot-staging/google-cloud-service-control/v1/setup.py new file mode 100644 index 000000000000..f6f20f5adf64 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-service-control' + + +description = "Google Cloud Service Control API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/servicecontrol/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in 
setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/__init__.py b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py new file mode 100644 index 000000000000..d208ca44203a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_quota_controller.py @@ -0,0 +1,1864 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api import distribution_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.servicecontrol_v1.services.quota_controller import QuotaControllerAsyncClient +from google.cloud.servicecontrol_v1.services.quota_controller import QuotaControllerClient +from google.cloud.servicecontrol_v1.services.quota_controller import transports +from google.cloud.servicecontrol_v1.types import distribution as gas_distribution +from google.cloud.servicecontrol_v1.types import metric_value +from google.cloud.servicecontrol_v1.types import quota_controller +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # 
type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert QuotaControllerClient._get_default_mtls_endpoint(None) is None + assert QuotaControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert QuotaControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert QuotaControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert QuotaControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert QuotaControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert QuotaControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert QuotaControllerClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert QuotaControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + QuotaControllerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert QuotaControllerClient._read_environment_variables() == (False, "never", None) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert QuotaControllerClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert QuotaControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + QuotaControllerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert QuotaControllerClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert QuotaControllerClient._get_client_cert_source(None, False) is None + assert QuotaControllerClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert QuotaControllerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert QuotaControllerClient._get_client_cert_source(None, True) is mock_default_cert_source + assert QuotaControllerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(QuotaControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerClient)) +@mock.patch.object(QuotaControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + 
mock_client_cert_source = mock.Mock() + default_universe = QuotaControllerClient._DEFAULT_UNIVERSE + default_endpoint = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert QuotaControllerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert QuotaControllerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == QuotaControllerClient.DEFAULT_MTLS_ENDPOINT + assert QuotaControllerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert QuotaControllerClient._get_api_endpoint(None, None, default_universe, "always") == QuotaControllerClient.DEFAULT_MTLS_ENDPOINT + assert QuotaControllerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == QuotaControllerClient.DEFAULT_MTLS_ENDPOINT + assert QuotaControllerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert QuotaControllerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + QuotaControllerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert QuotaControllerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert QuotaControllerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert QuotaControllerClient._get_universe_domain(None, None) == QuotaControllerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + QuotaControllerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (QuotaControllerClient, "grpc"), + (QuotaControllerAsyncClient, "grpc_asyncio"), + (QuotaControllerClient, "rest"), +]) +def test_quota_controller_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicecontrol.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.QuotaControllerGrpcTransport, "grpc"), + (transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.QuotaControllerRestTransport, "rest"), +]) +def test_quota_controller_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (QuotaControllerClient, "grpc"),
        (QuotaControllerAsyncClient, "grpc_asyncio"),
        (QuotaControllerClient, "rest"),
    ],
)
def test_quota_controller_client_from_service_account_file(client_class, transport_name):
    """Clients built via from_service_account_file/json adopt the factory credentials."""
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, "from_service_account_file") as factory:
        factory.return_value = fake_creds

        # Both the *_file and *_json constructors route through the same factory.
        for ctor in (client_class.from_service_account_file, client_class.from_service_account_json):
            client = ctor("dummy/file/path.json", transport=transport_name)
            assert client.transport._credentials == fake_creds
            assert isinstance(client, client_class)

        # gRPC transports use host:port; REST uses a full https URL.
        expected_host = (
            'servicecontrol.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else 'https://servicecontrol.googleapis.com'
        )
        assert client.transport._host == expected_host


def test_quota_controller_client_get_transport_class():
    """get_transport_class returns a registered transport; 'grpc' maps to gRPC."""
    # With no argument, one of the registered transports is returned.
    default_transport = QuotaControllerClient.get_transport_class()
    assert default_transport in [
        transports.QuotaControllerGrpcTransport,
        transports.QuotaControllerRestTransport,
    ]

    # Asking for "grpc" explicitly yields the gRPC transport.
    assert QuotaControllerClient.get_transport_class("grpc") == transports.QuotaControllerGrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (QuotaControllerClient, transports.QuotaControllerGrpcTransport, "grpc"),
    (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio"),
    (QuotaControllerClient, transports.QuotaControllerRestTransport, "rest"),
])
@mock.patch.object(QuotaControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerClient))
@mock.patch.object(QuotaControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerAsyncClient))
def test_quota_controller_client_client_options(client_class, transport_class, transport_name):
    """ClientOptions and mTLS env vars drive how the transport is constructed."""

    def expected_kwargs(**overrides):
        # The kwargs every transport constructor call is expected to receive,
        # with per-scenario overrides layered on top.
        base = dict(
            credentials=None,
            credentials_file=None,
            host=None,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
        base.update(overrides)
        return base

    # A ready-made transport instance is adopted as-is: no new transport built.
    with mock.patch.object(QuotaControllerClient, 'get_transport_class') as gtc:
        ready_transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=ready_transport)
        gtc.assert_not_called()

    # A transport *name* forces construction of a fresh transport.
    with mock.patch.object(QuotaControllerClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # An explicit api_endpoint wins.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(**expected_kwargs(host="squid.clam.whelk"))

    # GOOGLE_API_USE_MTLS_ENDPOINT="never" -> plain default endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(**expected_kwargs(
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            ))

    # GOOGLE_API_USE_MTLS_ENDPOINT="always" -> the mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(**expected_kwargs(host=client.DEFAULT_MTLS_ENDPOINT))

    # Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client = client_class(transport=transport_name)
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # quota_project_id flows through to the transport.
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(**expected_kwargs(
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            quota_project_id="octopus",
        ))

    # api_audience flows through to the transport.
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(**expected_kwargs(
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            api_audience="https://language.googleapis.com",
        ))
"false"), + (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (QuotaControllerClient, transports.QuotaControllerRestTransport, "rest", "true"), + (QuotaControllerClient, transports.QuotaControllerRestTransport, "rest", "false"), +]) +@mock.patch.object(QuotaControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerClient)) +@mock.patch.object(QuotaControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_quota_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
@pytest.mark.parametrize("client_class", [
    QuotaControllerClient, QuotaControllerAsyncClient
])
@mock.patch.object(QuotaControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QuotaControllerClient))
@mock.patch.object(QuotaControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(QuotaControllerAsyncClient))
def test_quota_controller_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source reacts to the cert/mTLS env switches."""
    mock_client_cert_source = mock.Mock()

    # Cert env "true": endpoint and cert from options are used verbatim.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Cert env "false": endpoint kept, cert source discarded.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # MTLS "never": plain default endpoint, no cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # MTLS "always": mTLS endpoint even without a cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # MTLS "auto" and no default cert available: plain endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # MTLS "auto" and a default cert exists: mTLS endpoint plus that cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source

    # Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"


@pytest.mark.parametrize("client_class", [
    QuotaControllerClient, QuotaControllerAsyncClient
])
@mock.patch.object(QuotaControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerClient))
@mock.patch.object(QuotaControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(QuotaControllerAsyncClient))
def test_quota_controller_client_client_api_endpoint(client_class):
    """api_endpoint resolution: explicit override, env switches, universe domain."""
    mock_client_cert_source = client_cert_source_callback
    api_override = "foo.com"
    default_universe = QuotaControllerClient._DEFAULT_UNIVERSE
    default_endpoint = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = QuotaControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # An explicit api_endpoint always wins, even with a client cert in use.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
            assert client.api_endpoint == api_override

    # MTLS "never" -> the GDU-populated default endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint

    # MTLS "always" -> the mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT

    # A custom universe_domain (where ClientOptions supports one) reshapes the
    # endpoint; otherwise the GDU default applies.
    options = client_options.ClientOptions()
    universe_exists = hasattr(options, "universe_domain")
    if universe_exists:
        options = client_options.ClientOptions(universe_domain=mock_universe)
    client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
    assert client.universe_domain == (mock_universe if universe_exists else default_universe)

    # Without a universe_domain attribute at all, fall back to the GDU default.
    options = client_options.ClientOptions()
    if hasattr(options, "universe_domain"):
        delattr(options, "universe_domain")
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (QuotaControllerClient, transports.QuotaControllerGrpcTransport, "grpc"),
    (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio"),
    (QuotaControllerClient, transports.QuotaControllerRestTransport, "rest"),
])
def test_quota_controller_client_client_options_scopes(client_class, transport_class, transport_name):
    """User-supplied OAuth scopes are forwarded to the transport constructor."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"])
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (QuotaControllerClient, transports.QuotaControllerGrpcTransport, "grpc", grpc_helpers),
    (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (QuotaControllerClient, transports.QuotaControllerRestTransport, "rest", None),
])
def test_quota_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A credentials_file in ClientOptions is handed to the transport untouched."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


def test_quota_controller_client_client_options_from_dict():
    """client_options may be a plain dict; api_endpoint still reaches the transport."""
    with mock.patch('google.cloud.servicecontrol_v1.services.quota_controller.transports.QuotaControllerGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = QuotaControllerClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (QuotaControllerClient, transports.QuotaControllerGrpcTransport, "grpc", grpc_helpers),
    (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_quota_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """Channel creation uses the credentials loaded from the configured file."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")

    # The transport itself receives the raw credentials_file value.
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # The credentials loaded from that file (not ADC) back the channel.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        adc_creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (adc_creds, None)
        client = client_class(client_options=file_options, transport=transport_name)
        create_channel.assert_called_with(
            "servicecontrol.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
                'https://www.googleapis.com/auth/servicecontrol',
            ),
            scopes=None,
            default_host="servicecontrol.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize("request_type", [
    quota_controller.AllocateQuotaRequest,
    dict,
])
def test_allocate_quota(request_type, transport: str = 'grpc'):
    """allocate_quota (sync) forwards the request and surfaces the stub's response."""
    client = QuotaControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional as far as the runtime is concerned, and
    # the API is mocked, so an empty request suffices.
    request = request_type()

    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        call.return_value = quota_controller.AllocateQuotaResponse(
            operation_id='operation_id_value',
            service_config_id='service_config_id_value',
        )
        response = client.allocate_quota(request)

        # Exactly one underlying stub invocation, with an equivalent request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = quota_controller.AllocateQuotaRequest()
        assert args[0] == request

    # The response surfaces unchanged.
    assert isinstance(response, quota_controller.AllocateQuotaResponse)
    assert response.operation_id == 'operation_id_value'
    assert response.service_config_id == 'service_config_id_value'


def test_allocate_quota_non_empty_request_with_auto_populated_field():
    """Coverage failsafe for AIP-4235 auto-population of UUID4 request fields."""
    client = QuotaControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Fill every non-UUID4 string field; qualifying UUID4 fields should then
    # be populated automatically per AIP-4235.
    request = quota_controller.AllocateQuotaRequest(
        service_name='service_name_value',
        service_config_id='service_config_id_value',
    )

    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client.allocate_quota(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == quota_controller.AllocateQuotaRequest(
            service_name='service_name_value',
            service_config_id='service_config_id_value',
        )
def test_allocate_quota_use_cached_wrapped_rpc():
    """The client must reuse the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = QuotaControllerClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Construction wraps every method once...
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # ...and caches the wrapper on the transport.
        assert client._transport.allocate_quota in client._transport._wrapped_methods

        # Swap in a mock so invocations can be counted.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.allocate_quota] = mock_rpc
        request = {}
        client.allocate_quota(request)

        assert mock_rpc.call_count == 1

        client.allocate_quota(request)

        # The second call must not create a new wrapper.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_allocate_quota_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients likewise reuse wrapped RPCs cached at construction time."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = QuotaControllerAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Construction wraps every method once...
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # ...and caches the wrapper on the inner transport.
        assert client._client._transport.allocate_quota in client._client._transport._wrapped_methods

        # Swap in an async mock so invocations can be counted.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.allocate_quota] = mock_rpc

        request = {}
        await client.allocate_quota(request)

        assert mock_rpc.call_count == 1

        await client.allocate_quota(request)

        # The second call must not create a new wrapper.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
@pytest.mark.asyncio
async def test_allocate_quota_async(transport: str = 'grpc_asyncio', request_type=quota_controller.AllocateQuotaRequest):
    """allocate_quota (async) forwards the request and surfaces the stub's response."""
    client = QuotaControllerAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(quota_controller.AllocateQuotaResponse(
            operation_id='operation_id_value',
            service_config_id='service_config_id_value',
        ))
        response = await client.allocate_quota(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once. (Fixed: the previous `assert len(call.mock_calls)` only
        # checked truthiness; the sync counterpart asserts `== 1`.)
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = quota_controller.AllocateQuotaRequest()
        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, quota_controller.AllocateQuotaResponse)
        assert response.operation_id == 'operation_id_value'
        assert response.service_config_id == 'service_config_id_value'


@pytest.mark.asyncio
async def test_allocate_quota_async_from_dict():
    # Re-run the async test with a dict-typed request.
    await test_allocate_quota_async(request_type=dict)


def test_allocate_quota_field_headers():
    """The service_name routing field must be echoed into x-goog-request-params."""
    client = QuotaControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header; set it to a non-empty value.
    request = quota_controller.AllocateQuotaRequest()

    request.service_name = 'service_name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        call.return_value = quota_controller.AllocateQuotaResponse()
        client.allocate_quota(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'service_name=service_name_value',
        ) in kw['metadata']
@pytest.mark.asyncio
async def test_allocate_quota_field_headers_async():
    """Async variant: service_name must be echoed into x-goog-request-params."""
    client = QuotaControllerAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header; set it to a non-empty value.
    request = quota_controller.AllocateQuotaRequest()

    request.service_name = 'service_name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(quota_controller.AllocateQuotaResponse())
        await client.allocate_quota(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once. (Fixed: the previous `assert len(call.mock_calls)` only
        # checked truthiness; the sync counterpart asserts `== 1`.)
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'service_name=service_name_value',
        ) in kw['metadata']


def test_allocate_quota_rest_use_cached_wrapped_rpc():
    """REST clients must also reuse the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = QuotaControllerClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Construction wraps every method once...
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # ...and caches the wrapper on the transport.
        assert client._transport.allocate_quota in client._transport._wrapped_methods

        # Swap in a mock so invocations can be counted.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.allocate_quota] = mock_rpc

        request = {}
        client.allocate_quota(request)

        assert mock_rpc.call_count == 1

        client.allocate_quota(request)

        # The second call must not create a new wrapper.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
def test_credentials_transport_error():
    """Mutually exclusive client kwargs must raise ValueError."""
    # credentials together with a transport instance.
    transport = transports.QuotaControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = QuotaControllerClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # credentials_file together with a transport instance.
    transport = transports.QuotaControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = QuotaControllerClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # api_key together with a transport instance.
    transport = transports.QuotaControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = QuotaControllerClient(
            client_options=options,
            transport=transport,
        )

    # api_key together with explicit credentials.
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = QuotaControllerClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # scopes together with a transport instance.
    transport = transports.QuotaControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = QuotaControllerClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
def test_transport_instance():
    """A prebuilt transport instance is adopted verbatim by the client."""
    transport = transports.QuotaControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = QuotaControllerClient(transport=transport)
    assert client.transport is transport


def test_transport_get_channel():
    """Both gRPC transports expose a usable channel."""
    for transport_class in (
        transports.QuotaControllerGrpcTransport,
        transports.QuotaControllerGrpcAsyncIOTransport,
    ):
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel


@pytest.mark.parametrize("transport_class", [
    transports.QuotaControllerGrpcTransport,
    transports.QuotaControllerGrpcAsyncIOTransport,
    transports.QuotaControllerRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to ADC when no credentials are supplied."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


def test_transport_kind_grpc():
    """The gRPC transport reports kind == 'grpc'."""
    transport = QuotaControllerClient.get_transport_class("grpc")(
        credentials=ga_credentials.AnonymousCredentials()
    )
    assert transport.kind == "grpc"


def test_initialize_client_w_grpc():
    """Smoke test: a gRPC-backed client can be constructed."""
    client = QuotaControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc"
    )
    assert client is not None


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_allocate_quota_empty_call_grpc():
    client = QuotaControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        call.return_value = quota_controller.AllocateQuotaResponse()
        client.allocate_quota(request=None)

        # The stub is invoked with a default-constructed request.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == quota_controller.AllocateQuotaRequest()


def test_transport_kind_grpc_asyncio():
    """The asyncio gRPC transport reports kind == 'grpc_asyncio'."""
    transport = QuotaControllerAsyncClient.get_transport_class("grpc_asyncio")(
        credentials=async_anonymous_credentials()
    )
    assert transport.kind == "grpc_asyncio"


def test_initialize_client_w_grpc_asyncio():
    """Smoke test: an asyncio gRPC-backed client can be constructed."""
    client = QuotaControllerAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio"
    )
    assert client is not None


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_allocate_quota_empty_call_grpc_asyncio():
    client = QuotaControllerAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.allocate_quota),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(quota_controller.AllocateQuotaResponse(
            operation_id='operation_id_value',
            service_config_id='service_config_id_value',
        ))
        await client.allocate_quota(request=None)

        # The stub is invoked with a default-constructed request.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == quota_controller.AllocateQuotaRequest()
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = quota_controller.AllocateQuotaRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = QuotaControllerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_allocate_quota_rest_bad_request(request_type=quota_controller.AllocateQuotaRequest): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.allocate_quota(request) + + +@pytest.mark.parametrize("request_type", [ + quota_controller.AllocateQuotaRequest, + dict, +]) +def test_allocate_quota_rest_call_success(request_type): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = quota_controller.AllocateQuotaResponse( + operation_id='operation_id_value', + service_config_id='service_config_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = quota_controller.AllocateQuotaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.allocate_quota(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, quota_controller.AllocateQuotaResponse) + assert response.operation_id == 'operation_id_value' + assert response.service_config_id == 'service_config_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_allocate_quota_rest_interceptors(null_interceptor): + transport = transports.QuotaControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.QuotaControllerRestInterceptor(), + ) + client = QuotaControllerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.QuotaControllerRestInterceptor, "post_allocate_quota") as post, \ + mock.patch.object(transports.QuotaControllerRestInterceptor, "pre_allocate_quota") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = quota_controller.AllocateQuotaRequest.pb(quota_controller.AllocateQuotaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = quota_controller.AllocateQuotaResponse.to_json(quota_controller.AllocateQuotaResponse()) 
+ req.return_value.content = return_value + + request = quota_controller.AllocateQuotaRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = quota_controller.AllocateQuotaResponse() + + client.allocate_quota(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_allocate_quota_empty_call_rest(): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.allocate_quota), + '__call__') as call: + client.allocate_quota(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = quota_controller.AllocateQuotaRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.QuotaControllerGrpcTransport, + ) + +def test_quota_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.QuotaControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_quota_controller_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.servicecontrol_v1.services.quota_controller.transports.QuotaControllerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.QuotaControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'allocate_quota', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_quota_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.servicecontrol_v1.services.quota_controller.transports.QuotaControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QuotaControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id="octopus", + ) + + +def test_quota_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.servicecontrol_v1.services.quota_controller.transports.QuotaControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QuotaControllerTransport() + adc.assert_called_once() + + +def test_quota_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + QuotaControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QuotaControllerGrpcTransport, + transports.QuotaControllerGrpcAsyncIOTransport, + ], +) +def test_quota_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/servicecontrol',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QuotaControllerGrpcTransport, + transports.QuotaControllerGrpcAsyncIOTransport, + transports.QuotaControllerRestTransport, + ], +) +def test_quota_controller_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.QuotaControllerGrpcTransport, grpc_helpers), + (transports.QuotaControllerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_quota_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "servicecontrol.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + scopes=["1", "2"], + default_host="servicecontrol.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.QuotaControllerGrpcTransport, transports.QuotaControllerGrpcAsyncIOTransport]) +def test_quota_controller_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_quota_controller_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.QuotaControllerRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_quota_controller_host_no_port(transport_name): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_quota_controller_host_with_port(transport_name): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com:8000' + ) + 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_quota_controller_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = QuotaControllerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = QuotaControllerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.allocate_quota._session + session2 = client2.transport.allocate_quota._session + assert session1 != session2 +def test_quota_controller_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.QuotaControllerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_quota_controller_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.QuotaControllerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.QuotaControllerGrpcTransport, transports.QuotaControllerGrpcAsyncIOTransport]) +def test_quota_controller_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.QuotaControllerGrpcTransport, transports.QuotaControllerGrpcAsyncIOTransport]) +def test_quota_controller_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = QuotaControllerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = QuotaControllerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = QuotaControllerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = QuotaControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = QuotaControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = QuotaControllerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = QuotaControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = QuotaControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = QuotaControllerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = QuotaControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = QuotaControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = QuotaControllerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = QuotaControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = QuotaControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = QuotaControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.QuotaControllerTransport, '_prep_wrapped_messages') as prep: + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.QuotaControllerTransport, '_prep_wrapped_messages') as prep: + transport_class = QuotaControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = QuotaControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: 
+ close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = QuotaControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (QuotaControllerClient, transports.QuotaControllerGrpcTransport), + (QuotaControllerAsyncClient, transports.QuotaControllerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_service_controller.py 
b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_service_controller.py new file mode 100644 index 000000000000..31933959b027 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v1/tests/unit/gapic/servicecontrol_v1/test_service_controller.py @@ -0,0 +1,2323 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api import distribution_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import 
gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.servicecontrol_v1.services.service_controller import ServiceControllerAsyncClient +from google.cloud.servicecontrol_v1.services.service_controller import ServiceControllerClient +from google.cloud.servicecontrol_v1.services.service_controller import transports +from google.cloud.servicecontrol_v1.types import check_error +from google.cloud.servicecontrol_v1.types import distribution as gas_distribution +from google.cloud.servicecontrol_v1.types import http_request +from google.cloud.servicecontrol_v1.types import log_entry +from google.cloud.servicecontrol_v1.types import metric_value +from google.cloud.servicecontrol_v1.types import operation +from google.cloud.servicecontrol_v1.types import service_controller +from google.logging.type import log_severity_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
+def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceControllerClient._get_default_mtls_endpoint(None) is None + assert ServiceControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ServiceControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ServiceControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ServiceControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ServiceControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert ServiceControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": 
"true"}): + assert ServiceControllerClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ServiceControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + ServiceControllerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ServiceControllerClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ServiceControllerClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ServiceControllerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceControllerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ServiceControllerClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ServiceControllerClient._get_client_cert_source(None, False) is None + assert ServiceControllerClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert ServiceControllerClient._get_client_cert_source(mock_provided_cert_source, True) == 
mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert ServiceControllerClient._get_client_cert_source(None, True) is mock_default_cert_source + assert ServiceControllerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ServiceControllerClient._DEFAULT_UNIVERSE + default_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert ServiceControllerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert ServiceControllerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceControllerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert ServiceControllerClient._get_api_endpoint(None, None, default_universe, "always") == ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceControllerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceControllerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert 
ServiceControllerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceControllerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ServiceControllerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ServiceControllerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ServiceControllerClient._get_universe_domain(None, None) == ServiceControllerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ServiceControllerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceControllerClient, "grpc"), + (ServiceControllerAsyncClient, "grpc_asyncio"), + (ServiceControllerClient, "rest"), +]) +def test_service_controller_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicecontrol.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceControllerGrpcTransport, "grpc"), + (transports.ServiceControllerGrpcAsyncIOTransport, 
"grpc_asyncio"), + (transports.ServiceControllerRestTransport, "rest"), +]) +def test_service_controller_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceControllerClient, "grpc"), + (ServiceControllerAsyncClient, "grpc_asyncio"), + (ServiceControllerClient, "rest"), +]) +def test_service_controller_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicecontrol.googleapis.com' + ) + + +def test_service_controller_client_get_transport_class(): + transport = ServiceControllerClient.get_transport_class() + available_transports = [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerRestTransport, + ] + 
assert transport in available_transports + + transport = ServiceControllerClient.get_transport_class("grpc") + assert transport == transports.ServiceControllerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest"), +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +def test_service_controller_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceControllerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceControllerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", "true"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", "false"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", "true"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", "false"), +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ServiceControllerClient, ServiceControllerAsyncClient +]) +@mock.patch.object(ServiceControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceControllerAsyncClient)) +def test_service_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ServiceControllerClient, ServiceControllerAsyncClient +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +def test_service_controller_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ServiceControllerClient._DEFAULT_UNIVERSE + default_endpoint = 
ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest"), +]) +def test_service_controller_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", grpc_helpers), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", None), +]) +def test_service_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_service_controller_client_client_options_from_dict(): + with mock.patch('google.cloud.servicecontrol_v1.services.service_controller.transports.ServiceControllerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ServiceControllerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", grpc_helpers), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_service_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "servicecontrol.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + scopes=None, + default_host="servicecontrol.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + service_controller.CheckRequest, + dict, +]) +def test_check(request_type, transport: str = 'grpc'): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as 
the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_controller.CheckResponse( + operation_id='operation_id_value', + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + ) + response = client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service_controller.CheckRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.CheckResponse) + assert response.operation_id == 'operation_id_value' + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +def test_check_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service_controller.CheckRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.check(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_controller.CheckRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + +def test_check_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.check] = mock_rpc + request = {} + client.check(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.check in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.check] = mock_rpc + + request = {} + await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_async(transport: str = 'grpc_asyncio', request_type=service_controller.CheckRequest): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse( + operation_id='operation_id_value', + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + )) + response = await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service_controller.CheckRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.CheckResponse) + assert response.operation_id == 'operation_id_value' + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +@pytest.mark.asyncio +async def test_check_async_from_dict(): + await test_check_async(request_type=dict) + +def test_check_field_headers(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.CheckRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = service_controller.CheckResponse() + client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_check_field_headers_async(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.CheckRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse()) + await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + service_controller.ReportRequest, + dict, +]) +def test_report(request_type, transport: str = 'grpc'): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service_controller.ReportResponse( + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + ) + response = client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service_controller.ReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.ReportResponse) + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +def test_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service_controller.ReportRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_controller.ReportRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + +def test_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.report] = mock_rpc + request = {} + client.report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.report in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.report] = mock_rpc + + request = {} + await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_report_async(transport: str = 'grpc_asyncio', request_type=service_controller.ReportRequest): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse( + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + )) + response = await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service_controller.ReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.ReportResponse) + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +@pytest.mark.asyncio +async def test_report_async_from_dict(): + await test_report_async(request_type=dict) + +def test_report_field_headers(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.ReportRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = service_controller.ReportResponse() + client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_report_field_headers_async(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.ReportRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse()) + await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_check_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.check] = mock_rpc + + request = {} + client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.report] = mock_rpc + + request = {} + client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceControllerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceControllerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + transports.ServiceControllerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ServiceControllerClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_check_empty_call_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = service_controller.CheckResponse() + client.check(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_report_empty_call_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = service_controller.ReportResponse() + client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ServiceControllerAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_check_empty_call_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse( + operation_id='operation_id_value', + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + )) + await client.check(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_report_empty_call_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse( + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + )) + await client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ServiceControllerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_check_rest_bad_request(request_type=service_controller.CheckRequest): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.check(request) + + +@pytest.mark.parametrize("request_type", [ + service_controller.CheckRequest, + dict, +]) +def test_check_rest_call_success(request_type): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service_controller.CheckResponse( + operation_id='operation_id_value', + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service_controller.CheckResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.check(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service_controller.CheckResponse) + assert response.operation_id == 'operation_id_value' + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_rest_interceptors(null_interceptor): + transport = transports.ServiceControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceControllerRestInterceptor(), + ) + client = ServiceControllerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "post_check") as post, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "pre_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service_controller.CheckRequest.pb(service_controller.CheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service_controller.CheckResponse.to_json(service_controller.CheckResponse()) + req.return_value.content = return_value + + request = service_controller.CheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service_controller.CheckResponse() + + client.check(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_report_rest_bad_request(request_type=service_controller.ReportRequest): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.report(request) + + +@pytest.mark.parametrize("request_type", [ + service_controller.ReportRequest, + dict, +]) +def test_report_rest_call_success(request_type): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service_controller.ReportResponse( + service_config_id='service_config_id_value', + service_rollout_id='service_rollout_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service_controller.ReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service_controller.ReportResponse) + assert response.service_config_id == 'service_config_id_value' + assert response.service_rollout_id == 'service_rollout_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_report_rest_interceptors(null_interceptor): + transport = transports.ServiceControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceControllerRestInterceptor(), + ) + client = ServiceControllerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "post_report") as post, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "pre_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service_controller.ReportRequest.pb(service_controller.ReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service_controller.ReportResponse.to_json(service_controller.ReportResponse()) + req.return_value.content = return_value + + request = service_controller.ReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service_controller.ReportResponse() + + client.report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_check_empty_call_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + client.check(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_empty_call_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceControllerGrpcTransport, + ) + +def test_service_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_controller_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.servicecontrol_v1.services.service_controller.transports.ServiceControllerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'check', + 'report', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.servicecontrol_v1.services.service_controller.transports.ServiceControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id="octopus", + ) + + +def test_service_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.servicecontrol_v1.services.service_controller.transports.ServiceControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceControllerTransport() + adc.assert_called_once() + + +def test_service_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + ], +) +def test_service_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/servicecontrol',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + transports.ServiceControllerRestTransport, + ], +) +def test_service_controller_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceControllerGrpcTransport, grpc_helpers), + (transports.ServiceControllerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_service_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "servicecontrol.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + scopes=["1", "2"], + default_host="servicecontrol.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_service_controller_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ServiceControllerRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_controller_host_no_port(transport_name): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_controller_host_with_port(transport_name): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com:8000' + ) + 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_controller_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceControllerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceControllerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.check._session + session2 = client2.transport.check._session + assert session1 != session2 + session1 = client1.transport.report._session + session2 = client2.transport.report._session + assert session1 != session2 +def test_service_controller_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceControllerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_controller_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceControllerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceControllerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ServiceControllerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceControllerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ServiceControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ServiceControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ServiceControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ServiceControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceControllerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ServiceControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ServiceControllerTransport, '_prep_wrapped_messages') as prep: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ServiceControllerTransport, '_prep_wrapped_messages') as prep: + transport_class = ServiceControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + 
async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-service-control/v2/.coveragerc b/owl-bot-staging/google-cloud-service-control/v2/.coveragerc new file mode 
100644 index 000000000000..db7396937636 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/servicecontrol/__init__.py + google/cloud/servicecontrol/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-service-control/v2/.flake8 b/owl-bot-staging/google-cloud-service-control/v2/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-service-control/v2/MANIFEST.in b/owl-bot-staging/google-cloud-service-control/v2/MANIFEST.in new file mode 100644 index 000000000000..732f0e20b03c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/servicecontrol *.py +recursive-include google/cloud/servicecontrol_v2 *.py diff --git a/owl-bot-staging/google-cloud-service-control/v2/README.rst b/owl-bot-staging/google-cloud-service-control/v2/README.rst new file mode 100644 index 000000000000..68e48382eefb --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Servicecontrol API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Servicecontrol API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/_static/custom.css b/owl-bot-staging/google-cloud-service-control/v2/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/conf.py b/owl-bot-staging/google-cloud-service-control/v2/docs/conf.py new file mode 100644 index 000000000000..583bcfefe02f --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-service-control documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-service-control" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-service-control-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-service-control.tex", + u"google-cloud-service-control Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-service-control", + u"Google Cloud Servicecontrol Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-service-control", + u"google-cloud-service-control Documentation", + author, + "google-cloud-service-control", + "GAPIC library for Google Cloud Servicecontrol API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/index.rst b/owl-bot-staging/google-cloud-service-control/v2/docs/index.rst new file mode 100644 index 000000000000..c3cafc8da704 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + servicecontrol_v2/services_ + servicecontrol_v2/types_ diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/service_controller.rst b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/service_controller.rst new file mode 100644 index 000000000000..56ae69a41ce6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/service_controller.rst @@ -0,0 +1,6 @@ +ServiceController +----------------------------------- + +.. 
automodule:: google.cloud.servicecontrol_v2.services.service_controller + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/services_.rst b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/services_.rst new file mode 100644 index 000000000000..a56e4968e5c0 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Servicecontrol v2 API +=============================================== +.. toctree:: + :maxdepth: 2 + + service_controller diff --git a/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/types_.rst b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/types_.rst new file mode 100644 index 000000000000..804c27dead2a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/docs/servicecontrol_v2/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Servicecontrol v2 API +============================================ + +.. automodule:: google.cloud.servicecontrol_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/__init__.py new file mode 100644 index 000000000000..1a5ed3c9e9fa --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.servicecontrol import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.servicecontrol_v2.services.service_controller.client import ServiceControllerClient +from google.cloud.servicecontrol_v2.services.service_controller.async_client import ServiceControllerAsyncClient + +from google.cloud.servicecontrol_v2.types.service_controller import CheckRequest +from google.cloud.servicecontrol_v2.types.service_controller import CheckResponse +from google.cloud.servicecontrol_v2.types.service_controller import ReportRequest +from google.cloud.servicecontrol_v2.types.service_controller import ReportResponse +from google.cloud.servicecontrol_v2.types.service_controller import ResourceInfo +from google.cloud.servicecontrol_v2.types.service_controller import ResourceInfoList + +__all__ = ('ServiceControllerClient', + 'ServiceControllerAsyncClient', + 'CheckRequest', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', + 'ResourceInfo', + 'ResourceInfoList', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/gapic_version.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/py.typed b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/py.typed new file mode 100644 index 000000000000..3971a5d2a8a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-control package uses inline types. diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/__init__.py new file mode 100644 index 000000000000..1311a4b9486a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.servicecontrol_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.service_controller import ServiceControllerClient +from .services.service_controller import ServiceControllerAsyncClient + +from .types.service_controller import CheckRequest +from .types.service_controller import CheckResponse +from .types.service_controller import ReportRequest +from .types.service_controller import ReportResponse +from .types.service_controller import ResourceInfo +from .types.service_controller import ResourceInfoList + +__all__ = ( + 'ServiceControllerAsyncClient', +'CheckRequest', +'CheckResponse', +'ReportRequest', +'ReportResponse', +'ResourceInfo', +'ResourceInfoList', +'ServiceControllerClient', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_metadata.json b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_metadata.json new file mode 100644 index 000000000000..5e12db8824a2 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_metadata.json @@ -0,0 +1,58 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.servicecontrol_v2", + "protoPackage": "google.api.servicecontrol.v2", + "schema": "1.0", + "services": { + "ServiceController": { + "clients": { + "grpc": { + "libraryClient": "ServiceControllerClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + "methods": [ + "report" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceControllerAsyncClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + "methods": [ + "report" + ] + } + } + }, + "rest": { + "libraryClient": "ServiceControllerClient", + "rpcs": { + "Check": { + "methods": [ + "check" + ] + }, + "Report": { + "methods": [ + "report" 
+ ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_version.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/py.typed b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/py.typed new file mode 100644 index 000000000000..3971a5d2a8a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-control package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/__init__.py new file mode 100644 index 000000000000..919b41bcdebf --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ServiceControllerClient +from .async_client import ServiceControllerAsyncClient + +__all__ = ( + 'ServiceControllerClient', + 'ServiceControllerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/async_client.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/async_client.py new file mode 100644 index 000000000000..b438044ebeba --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/async_client.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.servicecontrol_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.servicecontrol_v2.types import service_controller +from google.rpc import status_pb2 # type: ignore +from .transports.base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ServiceControllerGrpcAsyncIOTransport +from .client import ServiceControllerClient + + +class ServiceControllerAsyncClient: + """`Service Control API + v2 `__ + + Private Preview. This feature is only available for approved + services. + + This API provides admission control and telemetry reporting for + services that are integrated with `Service + Infrastructure `__. + """ + + _client: ServiceControllerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ServiceControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ServiceControllerClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod(ServiceControllerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ServiceControllerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ServiceControllerClient.common_folder_path) + parse_common_folder_path = staticmethod(ServiceControllerClient.parse_common_folder_path) + common_organization_path = staticmethod(ServiceControllerClient.common_organization_path) + parse_common_organization_path = staticmethod(ServiceControllerClient.parse_common_organization_path) + common_project_path = staticmethod(ServiceControllerClient.common_project_path) + parse_common_project_path = staticmethod(ServiceControllerClient.parse_common_project_path) + common_location_path = staticmethod(ServiceControllerClient.common_location_path) + parse_common_location_path = staticmethod(ServiceControllerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerAsyncClient: The constructed client. + """ + return ServiceControllerClient.from_service_account_info.__func__(ServiceControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerAsyncClient: The constructed client. + """ + return ServiceControllerClient.from_service_account_file.__func__(ServiceControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ServiceControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServiceControllerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceControllerTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ServiceControllerClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceControllerTransport, Callable[..., ServiceControllerTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service controller async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceControllerTransport,Callable[..., ServiceControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceControllerTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServiceControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def check(self, + request: Optional[Union[service_controller.CheckRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.CheckResponse: + r"""Private Preview. This feature is only available for approved + services. + + This method provides admission control for services that are + integrated with `Service + Infrastructure `__. + It checks whether an operation should be allowed based on the + service configuration and relevant policies. It must be called + before the operation is executed. For more information, see + `Admission + Control `__. + + NOTE: The admission control has an expected policy propagation + delay of 60s. The caller **must** not depend on the most recent + policy changes. + + NOTE: The admission control has a hard limit of 1 referenced + resources per call. If an operation refers to more than 1 + resources, the caller must call the Check method multiple times. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v2 + + async def sample_check(): + # Create a client + client = servicecontrol_v2.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v2.CheckRequest( + ) + + # Make the request + response = await client.check(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicecontrol_v2.types.CheckRequest, dict]]): + The request object. Request message for the Check method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v2.types.CheckResponse: + Response message for the Check + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.CheckRequest): + request = service_controller.CheckRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def report(self, + request: Optional[Union[service_controller.ReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.ReportResponse: + r"""Private Preview. This feature is only available for approved + services. + + This method provides telemetry reporting for services that are + integrated with `Service + Infrastructure `__. + It reports a list of operations that have occurred on a service. + It must be called after the operations have been executed. For + more information, see `Telemetry + Reporting `__. + + NOTE: The telemetry reporting has a hard limit of 1000 + operations and 1MB per Report call. It is recommended to have no + more than 100 operations per call. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v2 + + async def sample_report(): + # Create a client + client = servicecontrol_v2.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v2.ReportRequest( + ) + + # Make the request + response = await client.report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicecontrol_v2.types.ReportRequest, dict]]): + The request object. Request message for the Report + method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v2.types.ReportResponse: + Response message for the Report + method. If the request contains any + invalid data, the server returns an RPC + error. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.ReportRequest): + request = service_controller.ReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ServiceControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceControllerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/client.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/client.py new file mode 100644 index 000000000000..0329c5c3bfe1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/client.py @@ -0,0 +1,772 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.servicecontrol_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.servicecontrol_v2.types import service_controller +from google.rpc import status_pb2 # type: ignore +from .transports.base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ServiceControllerGrpcTransport +from .transports.grpc_asyncio import ServiceControllerGrpcAsyncIOTransport +from .transports.rest import ServiceControllerRestTransport + + +class ServiceControllerClientMeta(type): + """Metaclass for the ServiceController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceControllerTransport]] + _transport_registry["grpc"] = ServiceControllerGrpcTransport + _transport_registry["grpc_asyncio"] = ServiceControllerGrpcAsyncIOTransport + _transport_registry["rest"] = ServiceControllerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ServiceControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceControllerClient(metaclass=ServiceControllerClientMeta): + """`Service Control API + v2 `__ + + Private Preview. This feature is only available for approved + services. + + This API provides admission control and telemetry reporting for + services that are integrated with `Service + Infrastructure `__. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "servicecontrol.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "servicecontrol.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceControllerClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceControllerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceControllerTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> 
Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. 
+ Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ServiceControllerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ServiceControllerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ServiceControllerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceControllerTransport, Callable[..., ServiceControllerTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceControllerTransport,Callable[..., ServiceControllerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceControllerTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ServiceControllerClient._read_environment_variables() + self._client_cert_source = ServiceControllerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = ServiceControllerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ServiceControllerTransport) + if transport_provided: + # transport is a ServiceControllerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ServiceControllerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ServiceControllerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ServiceControllerTransport], Callable[..., ServiceControllerTransport]] = ( + ServiceControllerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ServiceControllerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def check(self, + request: Optional[Union[service_controller.CheckRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.CheckResponse: + r"""Private Preview. This feature is only available for approved + services. + + This method provides admission control for services that are + integrated with `Service + Infrastructure `__. + It checks whether an operation should be allowed based on the + service configuration and relevant policies. It must be called + before the operation is executed. 
For more information, see + `Admission + Control `__. + + NOTE: The admission control has an expected policy propagation + delay of 60s. The caller **must** not depend on the most recent + policy changes. + + NOTE: The admission control has a hard limit of 1 referenced + resources per call. If an operation refers to more than 1 + resources, the caller must call the Check method multiple times. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v2 + + def sample_check(): + # Create a client + client = servicecontrol_v2.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v2.CheckRequest( + ) + + # Make the request + response = client.check(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicecontrol_v2.types.CheckRequest, dict]): + The request object. Request message for the Check method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v2.types.CheckResponse: + Response message for the Check + method. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.CheckRequest): + request = service_controller.CheckRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def report(self, + request: Optional[Union[service_controller.ReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_controller.ReportResponse: + r"""Private Preview. This feature is only available for approved + services. + + This method provides telemetry reporting for services that are + integrated with `Service + Infrastructure `__. + It reports a list of operations that have occurred on a service. + It must be called after the operations have been executed. For + more information, see `Telemetry + Reporting `__. + + NOTE: The telemetry reporting has a hard limit of 1000 + operations and 1MB per Report call. It is recommended to have no + more than 100 operations per call. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicecontrol_v2 + + def sample_report(): + # Create a client + client = servicecontrol_v2.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v2.ReportRequest( + ) + + # Make the request + response = client.report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicecontrol_v2.types.ReportRequest, dict]): + The request object. Request message for the Report + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicecontrol_v2.types.ReportResponse: + Response message for the Report + method. If the request contains any + invalid data, the server returns an RPC + error. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service_controller.ReportRequest): + request = service_controller.ReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.report] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ServiceControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceControllerClient", +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/README.rst b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/README.rst new file mode 100644 index 000000000000..ab03c3a4d96b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ServiceControllerTransport` is the ABC for all transports. +- public child `ServiceControllerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ServiceControllerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseServiceControllerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `ServiceControllerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/__init__.py new file mode 100644 index 000000000000..0ec5461601ee --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceControllerTransport +from .grpc import ServiceControllerGrpcTransport +from .grpc_asyncio import ServiceControllerGrpcAsyncIOTransport +from .rest import ServiceControllerRestTransport +from .rest import ServiceControllerRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceControllerTransport]] +_transport_registry['grpc'] = ServiceControllerGrpcTransport +_transport_registry['grpc_asyncio'] = ServiceControllerGrpcAsyncIOTransport +_transport_registry['rest'] = ServiceControllerRestTransport + +__all__ = ( + 'ServiceControllerTransport', + 'ServiceControllerGrpcTransport', + 'ServiceControllerGrpcAsyncIOTransport', + 'ServiceControllerRestTransport', + 'ServiceControllerRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/base.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/base.py new file mode 100644 index 000000000000..a14e029344ce --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.servicecontrol_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.servicecontrol_v2.types import service_controller + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ServiceControllerTransport(abc.ABC): + """Abstract transport class for ServiceController.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', + ) + + DEFAULT_HOST: str = 'servicecontrol.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.check: gapic_v1.method.wrap_method( + self.check, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=5.0, + ), + default_timeout=5.0, + client_info=client_info, + ), + self.report: gapic_v1.method.wrap_method( + self.report, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + Union[ + service_controller.CheckResponse, + Awaitable[service_controller.CheckResponse] + ]]: + raise NotImplementedError() + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + Union[ + service_controller.ReportResponse, + Awaitable[service_controller.ReportResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ServiceControllerTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc.py new file mode 100644 index 000000000000..87951594b11b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.servicecontrol_v2.types import service_controller +from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO + + +class ServiceControllerGrpcTransport(ServiceControllerTransport): + """gRPC backend transport for ServiceController. + + `Service Control API + v2 `__ + + Private Preview. This feature is only available for approved + services. + + This API provides admission control and telemetry reporting for + services that are integrated with `Service + Infrastructure `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + service_controller.CheckResponse]: + r"""Return a callable for the check method over gRPC. + + Private Preview. This feature is only available for approved + services. + + This method provides admission control for services that are + integrated with `Service + Infrastructure `__. + It checks whether an operation should be allowed based on the + service configuration and relevant policies. It must be called + before the operation is executed. For more information, see + `Admission + Control `__. + + NOTE: The admission control has an expected policy propagation + delay of 60s. The caller **must** not depend on the most recent + policy changes. + + NOTE: The admission control has a hard limit of 1 referenced + resources per call. If an operation refers to more than 1 + resources, the caller must call the Check method multiple times. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + Returns: + Callable[[~.CheckRequest], + ~.CheckResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'check' not in self._stubs: + self._stubs['check'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v2.ServiceController/Check', + request_serializer=service_controller.CheckRequest.serialize, + response_deserializer=service_controller.CheckResponse.deserialize, + ) + return self._stubs['check'] + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + service_controller.ReportResponse]: + r"""Return a callable for the report method over gRPC. + + Private Preview. This feature is only available for approved + services. + + This method provides telemetry reporting for services that are + integrated with `Service + Infrastructure `__. + It reports a list of operations that have occurred on a service. + It must be called after the operations have been executed. For + more information, see `Telemetry + Reporting `__. + + NOTE: The telemetry reporting has a hard limit of 1000 + operations and 1MB per Report call. It is recommended to have no + more than 100 operations per call. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + Returns: + Callable[[~.ReportRequest], + ~.ReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'report' not in self._stubs: + self._stubs['report'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v2.ServiceController/Report', + request_serializer=service_controller.ReportRequest.serialize, + response_deserializer=service_controller.ReportResponse.deserialize, + ) + return self._stubs['report'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ServiceControllerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bee678ff4502 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/grpc_asyncio.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.servicecontrol_v2.types import service_controller +from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO +from .grpc import ServiceControllerGrpcTransport + + +class ServiceControllerGrpcAsyncIOTransport(ServiceControllerTransport): + """gRPC AsyncIO backend transport for ServiceController. + + `Service Control API + v2 `__ + + Private Preview. This feature is only available for approved + services. + + This API provides admission control and telemetry reporting for + services that are integrated with `Service + Infrastructure `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + Awaitable[service_controller.CheckResponse]]: + r"""Return a callable for the check method over gRPC. + + Private Preview. This feature is only available for approved + services. + + This method provides admission control for services that are + integrated with `Service + Infrastructure `__. + It checks whether an operation should be allowed based on the + service configuration and relevant policies. It must be called + before the operation is executed. For more information, see + `Admission + Control `__. + + NOTE: The admission control has an expected policy propagation + delay of 60s. The caller **must** not depend on the most recent + policy changes. + + NOTE: The admission control has a hard limit of 1 referenced + resources per call. If an operation refers to more than 1 + resources, the caller must call the Check method multiple times. + + This method requires the ``servicemanagement.services.check`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + Returns: + Callable[[~.CheckRequest], + Awaitable[~.CheckResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'check' not in self._stubs: + self._stubs['check'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v2.ServiceController/Check', + request_serializer=service_controller.CheckRequest.serialize, + response_deserializer=service_controller.CheckResponse.deserialize, + ) + return self._stubs['check'] + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + Awaitable[service_controller.ReportResponse]]: + r"""Return a callable for the report method over gRPC. 
+ + Private Preview. This feature is only available for approved + services. + + This method provides telemetry reporting for services that are + integrated with `Service + Infrastructure `__. + It reports a list of operations that have occurred on a service. + It must be called after the operations have been executed. For + more information, see `Telemetry + Reporting `__. + + NOTE: The telemetry reporting has a hard limit of 1000 + operations and 1MB per Report call. It is recommended to have no + more than 100 operations per call. + + This method requires the ``servicemanagement.services.report`` + permission on the specified service. For more information, see + `Service Control API Access + Control `__. + + Returns: + Callable[[~.ReportRequest], + Awaitable[~.ReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'report' not in self._stubs: + self._stubs['report'] = self.grpc_channel.unary_unary( + '/google.api.servicecontrol.v2.ServiceController/Report', + request_serializer=service_controller.ReportRequest.serialize, + response_deserializer=service_controller.ReportResponse.deserialize, + ) + return self._stubs['report'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.check: self._wrap_method( + self.check, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=5.0, + ), + default_timeout=5.0, + client_info=client_info, + ), + self.report: self._wrap_method( + self.report, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'ServiceControllerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest.py new file mode 100644 index 000000000000..7d38c2e090f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.servicecontrol_v2.types import service_controller + + +from .rest_base import _BaseServiceControllerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class ServiceControllerRestInterceptor: + """Interceptor for ServiceController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceControllerRestTransport. + + .. code-block:: python + class MyCustomServiceControllerInterceptor(ServiceControllerRestInterceptor): + def pre_check(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_report(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceControllerRestTransport(interceptor=MyCustomServiceControllerInterceptor()) + client = ServiceControllerClient(transport=transport) + + + """ + def pre_check(self, request: service_controller.CheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[service_controller.CheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceController server. + """ + return request, metadata + + def post_check(self, response: service_controller.CheckResponse) -> service_controller.CheckResponse: + """Post-rpc interceptor for check + + Override in a subclass to manipulate the response + after it is returned by the ServiceController server but before + it is returned to user code. 
+ """ + return response + + def pre_report(self, request: service_controller.ReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[service_controller.ReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceController server. + """ + return request, metadata + + def post_report(self, response: service_controller.ReportResponse) -> service_controller.ReportResponse: + """Post-rpc interceptor for report + + Override in a subclass to manipulate the response + after it is returned by the ServiceController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServiceControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceControllerRestInterceptor + + +class ServiceControllerRestTransport(_BaseServiceControllerRestTransport): + """REST backend synchronous transport for ServiceController. + + `Service Control API + v2 `__ + + Private Preview. This feature is only available for approved + services. + + This API provides admission control and telemetry reporting for + services that are integrated with `Service + Infrastructure `__. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ServiceControllerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServiceControllerRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Check(_BaseServiceControllerRestTransport._BaseCheck, ServiceControllerRestStub): + def __hash__(self): + return hash("ServiceControllerRestTransport.Check") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service_controller.CheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> service_controller.CheckResponse: + r"""Call the check method over HTTP. + + Args: + request (~.service_controller.CheckRequest): + The request object. 
Request message for the Check method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service_controller.CheckResponse: + Response message for the Check + method. + + """ + + http_options = _BaseServiceControllerRestTransport._BaseCheck._get_http_options() + request, metadata = self._interceptor.pre_check(request, metadata) + transcoded_request = _BaseServiceControllerRestTransport._BaseCheck._get_transcoded_request(http_options, request) + + body = _BaseServiceControllerRestTransport._BaseCheck._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceControllerRestTransport._BaseCheck._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceControllerRestTransport._Check._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_controller.CheckResponse() + pb_resp = service_controller.CheckResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check(resp) + return resp + + class _Report(_BaseServiceControllerRestTransport._BaseReport, ServiceControllerRestStub): + def __hash__(self): + return hash("ServiceControllerRestTransport.Report") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service_controller.ReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> service_controller.ReportResponse: + r"""Call the report method over HTTP. + + Args: + request (~.service_controller.ReportRequest): + The request object. Request message for the Report + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service_controller.ReportResponse: + Response message for the Report + method. If the request contains any + invalid data, the server returns an RPC + error. 
+ + """ + + http_options = _BaseServiceControllerRestTransport._BaseReport._get_http_options() + request, metadata = self._interceptor.pre_report(request, metadata) + transcoded_request = _BaseServiceControllerRestTransport._BaseReport._get_transcoded_request(http_options, request) + + body = _BaseServiceControllerRestTransport._BaseReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceControllerRestTransport._BaseReport._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceControllerRestTransport._Report._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_controller.ReportResponse() + pb_resp = service_controller.ReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_report(resp) + return resp + + @property + def check(self) -> Callable[ + [service_controller.CheckRequest], + service_controller.CheckResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Check(self._session, self._host, self._interceptor) # type: ignore + + @property + def report(self) -> Callable[ + [service_controller.ReportRequest], + service_controller.ReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Report(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ServiceControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest_base.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest_base.py new file mode 100644 index 000000000000..795632089df5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/services/service_controller/transports/rest_base.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import ServiceControllerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.servicecontrol_v2.types import service_controller + + +class _BaseServiceControllerRestTransport(ServiceControllerTransport): + """Base REST backend transport for ServiceController. + + Note: This class is not meant to be used directly. 
Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicecontrol.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicecontrol.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCheck: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/services/{service_name}:check', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service_controller.CheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/services/{service_name}:report', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
service_controller.ReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseServiceControllerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/__init__.py new file mode 100644 index 000000000000..05163110587a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .service_controller import ( + CheckRequest, + CheckResponse, + ReportRequest, + ReportResponse, + ResourceInfo, + ResourceInfoList, +) + +__all__ = ( + 'CheckRequest', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', + 'ResourceInfo', + 'ResourceInfoList', +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/service_controller.py b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/service_controller.py new file mode 100644 index 000000000000..87da4ab57349 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/google/cloud/servicecontrol_v2/types/service_controller.py @@ -0,0 +1,232 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.rpc import status_pb2 # type: ignore +from google.rpc.context import attribute_context_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicecontrol.v2', + manifest={ + 'CheckRequest', + 'ResourceInfo', + 'CheckResponse', + 'ReportRequest', + 'ReportResponse', + 'ResourceInfoList', + }, +) + + +class CheckRequest(proto.Message): + r"""Request message for the Check method. + + Attributes: + service_name (str): + The service name as specified in its service configuration. 
+ For example, ``"pubsub.googleapis.com"``. + + See + `google.api.Service `__ + for the definition of a service name. + service_config_id (str): + Specifies the version of the service + configuration that should be used to process the + request. Must not be empty. Set this field to + 'latest' to specify using the latest + configuration. + attributes (google.rpc.context.attribute_context_pb2.AttributeContext): + Describes attributes about the operation + being executed by the service. + resources (MutableSequence[google.cloud.servicecontrol_v2.types.ResourceInfo]): + Describes the resources and the policies + applied to each resource. + flags (str): + Optional. Contains a comma-separated list of + flags. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + attributes: attribute_context_pb2.AttributeContext = proto.Field( + proto.MESSAGE, + number=3, + message=attribute_context_pb2.AttributeContext, + ) + resources: MutableSequence['ResourceInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ResourceInfo', + ) + flags: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ResourceInfo(proto.Message): + r"""Describes a resource referenced in the request. + + Attributes: + name (str): + The name of the resource referenced in the + request. + type_ (str): + The resource type in the format of + "{service}/{kind}". + permission (str): + The resource permission needed for this + request. The format must be + "{service}/{plural}.{verb}". + container (str): + Optional. The identifier of the container of this resource. + For Google Cloud APIs, the resource container must be one of + the following formats: - + ``projects/`` - + ``folders/`` - + ``organizations/`` For the policy + enforcement on the container level (VPCSC and Location + Policy check), this field takes precedence on the container + extracted from name when presents. 
+ location (str): + Optional. The location of the resource. The + value must be a valid zone, region or + multiregion. For example: "europe-west4" or + "northamerica-northeast1-a". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + permission: str = proto.Field( + proto.STRING, + number=3, + ) + container: str = proto.Field( + proto.STRING, + number=4, + ) + location: str = proto.Field( + proto.STRING, + number=5, + ) + + +class CheckResponse(proto.Message): + r"""Response message for the Check method. + + Attributes: + status (google.rpc.status_pb2.Status): + Operation is allowed when this field is not set. Any + non-'OK' status indicates a denial; + [google.rpc.Status.details][google.rpc.Status.details] would + contain additional details about the denial. + headers (MutableMapping[str, str]): + Returns a set of request contexts generated from the + ``CheckRequest``. + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + +class ReportRequest(proto.Message): + r"""Request message for the Report method. + + Attributes: + service_name (str): + The service name as specified in its service configuration. + For example, ``"pubsub.googleapis.com"``. + + See + `google.api.Service `__ + for the definition of a service name. + service_config_id (str): + Specifies the version of the service + configuration that should be used to process the + request. Must not be empty. Set this field to + 'latest' to specify using the latest + configuration. + operations (MutableSequence[google.rpc.context.attribute_context_pb2.AttributeContext]): + Describes the list of operations to be + reported. Each operation is represented as an + AttributeContext, and contains all attributes + around an API access. 
+ """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + service_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + operations: MutableSequence[attribute_context_pb2.AttributeContext] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=attribute_context_pb2.AttributeContext, + ) + + +class ReportResponse(proto.Message): + r"""Response message for the Report method. + If the request contains any invalid data, the server returns an + RPC error. + + """ + + +class ResourceInfoList(proto.Message): + r"""Message containing resource details in a batch mode. + + Attributes: + resources (MutableSequence[google.cloud.servicecontrol_v2.types.ResourceInfo]): + The resource details. + """ + + resources: MutableSequence['ResourceInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ResourceInfo', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-control/v2/mypy.ini b/owl-bot-staging/google-cloud-service-control/v2/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-service-control/v2/noxfile.py b/owl-bot-staging/google-cloud-service-control/v2/noxfile.py new file mode 100644 index 000000000000..938a41d7b192 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-service-control' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/servicecontrol_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/servicecontrol_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_async.py b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_async.py new file mode 100644 index 000000000000..79aaf515dd35 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Check +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v2_generated_ServiceController_Check_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v2 + + +async def sample_check(): + # Create a client + client = servicecontrol_v2.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v2.CheckRequest( + ) + + # Make the request + response = await client.check(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v2_generated_ServiceController_Check_async] diff --git a/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_sync.py b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_sync.py new file mode 100644 index 000000000000..178fb23d0c2e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_check_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Check +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v2_generated_ServiceController_Check_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v2 + + +def sample_check(): + # Create a client + client = servicecontrol_v2.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v2.CheckRequest( + ) + + # Make the request + response = client.check(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v2_generated_ServiceController_Check_sync] diff --git a/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_async.py b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_async.py new file mode 100644 index 000000000000..f19c72509fe3 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Report +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v2_generated_ServiceController_Report_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v2 + + +async def sample_report(): + # Create a client + client = servicecontrol_v2.ServiceControllerAsyncClient() + + # Initialize request argument(s) + request = servicecontrol_v2.ReportRequest( + ) + + # Make the request + response = await client.report(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v2_generated_ServiceController_Report_async] diff --git a/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_sync.py b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_sync.py new file mode 100644 index 000000000000..291d95e14092 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/servicecontrol_v2_generated_service_controller_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Report +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-control + + +# [START servicecontrol_v2_generated_ServiceController_Report_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicecontrol_v2 + + +def sample_report(): + # Create a client + client = servicecontrol_v2.ServiceControllerClient() + + # Initialize request argument(s) + request = servicecontrol_v2.ReportRequest( + ) + + # Make the request + response = client.report(request=request) + + # Handle the response + print(response) + +# [END servicecontrol_v2_generated_ServiceController_Report_sync] diff --git a/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json new file mode 100644 index 000000000000..2b78179c69f8 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/samples/generated_samples/snippet_metadata_google.api.servicecontrol.v2.json @@ -0,0 +1,321 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.servicecontrol.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-service-control", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerAsyncClient", + "shortName": "ServiceControllerAsyncClient" + }, + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerAsyncClient.check", + "method": { + "fullName": "google.api.servicecontrol.v2.ServiceController.Check", + "service": { + "fullName": "google.api.servicecontrol.v2.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Check" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v2.types.CheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v2.types.CheckResponse", + "shortName": "check" + }, + "description": "Sample for Check", + "file": "servicecontrol_v2_generated_service_controller_check_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v2_generated_ServiceController_Check_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v2_generated_service_controller_check_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerClient", + "shortName": "ServiceControllerClient" + }, + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerClient.check", + "method": { + "fullName": "google.api.servicecontrol.v2.ServiceController.Check", + "service": { + "fullName": "google.api.servicecontrol.v2.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Check" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v2.types.CheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v2.types.CheckResponse", + "shortName": "check" + }, + "description": "Sample for Check", + "file": "servicecontrol_v2_generated_service_controller_check_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"servicecontrol_v2_generated_ServiceController_Check_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v2_generated_service_controller_check_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerAsyncClient", + "shortName": "ServiceControllerAsyncClient" + }, + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerAsyncClient.report", + "method": { + "fullName": "google.api.servicecontrol.v2.ServiceController.Report", + "service": { + "fullName": "google.api.servicecontrol.v2.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Report" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v2.types.ReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v2.types.ReportResponse", + "shortName": "report" + }, + "description": "Sample for Report", + "file": "servicecontrol_v2_generated_service_controller_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v2_generated_ServiceController_Report_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + 
"start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v2_generated_service_controller_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerClient", + "shortName": "ServiceControllerClient" + }, + "fullName": "google.cloud.servicecontrol_v2.ServiceControllerClient.report", + "method": { + "fullName": "google.api.servicecontrol.v2.ServiceController.Report", + "service": { + "fullName": "google.api.servicecontrol.v2.ServiceController", + "shortName": "ServiceController" + }, + "shortName": "Report" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicecontrol_v2.types.ReportRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicecontrol_v2.types.ReportResponse", + "shortName": "report" + }, + "description": "Sample for Report", + "file": "servicecontrol_v2_generated_service_controller_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicecontrol_v2_generated_ServiceController_Report_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicecontrol_v2_generated_service_controller_report_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-service-control/v2/scripts/fixup_servicecontrol_v2_keywords.py 
b/owl-bot-staging/google-cloud-service-control/v2/scripts/fixup_servicecontrol_v2_keywords.py new file mode 100644 index 000000000000..f6fd349f7612 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/scripts/fixup_servicecontrol_v2_keywords.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class servicecontrolCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'check': ('service_name', 'service_config_id', 'attributes', 'resources', 'flags', ), + 'report': ('service_name', 'service_config_id', 'operations', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. 
+ return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=servicecontrolCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the servicecontrol client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-service-control/v2/setup.py 
b/owl-bot-staging/google-cloud-service-control/v2/setup.py new file mode 100644 index 000000000000..f6f20f5adf64 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-service-control' + + +description = "Google Cloud Service Control API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/servicecontrol/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] 
+extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-control/v2/tests/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v2/tests/unit/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/__init__.py b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/test_service_controller.py b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/test_service_controller.py new file mode 100644 index 000000000000..2a364d863641 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-control/v2/tests/unit/gapic/servicecontrol_v2/test_service_controller.py @@ -0,0 +1,2284 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
import pytest
from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.protobuf import json_format

try:
    from google.auth.aio import credentials as ga_credentials_async
    HAS_GOOGLE_AUTH_AIO = True
except ImportError:  # pragma: NO COVER
    HAS_GOOGLE_AUTH_AIO = False

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.servicecontrol_v2.services.service_controller import ServiceControllerAsyncClient
from google.cloud.servicecontrol_v2.services.service_controller import ServiceControllerClient
from google.cloud.servicecontrol_v2.services.service_controller import transports
from google.cloud.servicecontrol_v2.types import service_controller
from google.oauth2 import service_account
from google.protobuf import any_pb2  # type: ignore
from google.protobuf import duration_pb2  # type: ignore
from google.protobuf import struct_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
from google.rpc import status_pb2  # type: ignore
from google.rpc.context import attribute_context_pb2  # type: ignore
import google.auth


async def mock_async_gen(data, chunk_size=1):
    """Async-generator test stub: yield UTF-8 encoded slices of *data*.

    NOTE(review): the loop advances one index per iteration (not by
    chunk_size), so slices overlap when chunk_size > 1 — preserved as-is,
    callers only use the default chunk_size=1.
    """
    for start in range(len(data)):  # pragma: NO COVER
        piece = data[start:start + chunk_size]
        yield piece.encode("utf-8")


def client_cert_source_callback():
    """Dummy client-certificate callback returning a (cert, key) byte pair."""
    return b"cert bytes", b"key bytes"


# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    """Return anonymous credentials, preferring the async flavor when the
    installed google-auth provides google.auth.aio."""
    if HAS_GOOGLE_AUTH_AIO:
        return ga_credentials_async.AnonymousCredentials()
    return ga_credentials.AnonymousCredentials()


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT


# If default endpoint template is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint template so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    """Substitute a non-default endpoint template when the client's template
    points at localhost, so endpoint-derivation logic can be exercised."""
    if "localhost" in client._DEFAULT_ENDPOINT_TEMPLATE:
        return "test.{UNIVERSE_DOMAIN}"
    return client._DEFAULT_ENDPOINT_TEMPLATE


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts to their mTLS
    counterparts and leaves non-Google endpoints (and None) untouched."""
    non_googleapi = "api.example.com"
    cases = [
        (None, None),
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        (non_googleapi, non_googleapi),
    ]
    for given, expected in cases:
        assert ServiceControllerClient._get_default_mtls_endpoint(given) == expected


def test__read_environment_variables():
    """_read_environment_variables returns the triple
    (use_client_cert, mtls_endpoint_mode, universe_domain) driven by the
    three supported environment variables, rejecting unsupported values."""
    read_env = ServiceControllerClient._read_environment_variables

    assert read_env() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert read_env() == (True, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        assert read_env() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            read_env()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert read_env() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert read_env() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert read_env() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            read_env()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert read_env() == (False, "auto", "foo.com")


def test__get_client_cert_source():
    """_get_client_cert_source prefers an explicitly provided source when
    client certs are enabled, else falls back to the ADC default source."""
    provided_source = mock.Mock()
    default_source = mock.Mock()
    get_source = ServiceControllerClient._get_client_cert_source

    assert get_source(None, False) is None
    assert get_source(provided_source, False) is None
    assert get_source(provided_source, True) == provided_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=default_source):
            assert get_source(None, True) is default_source
            assert get_source(provided_source, "true") is provided_source


@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient))
@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient))
def test__get_api_endpoint():
    """_get_api_endpoint resolves the endpoint from the override, cert
    source, universe domain, and mTLS mode; mTLS outside the default
    (googleapis.com) universe is rejected."""
    override = "foo.com"
    cert_source = mock.Mock()
    gdu = ServiceControllerClient._DEFAULT_UNIVERSE
    gdu_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=gdu)
    other_universe = "bar.com"
    other_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=other_universe)
    mtls_endpoint = ServiceControllerClient.DEFAULT_MTLS_ENDPOINT
    get_endpoint = ServiceControllerClient._get_api_endpoint

    # An explicit override always wins.
    assert get_endpoint(override, cert_source, gdu, "always") == override
    # A cert source in "auto" mode switches to the mTLS endpoint.
    assert get_endpoint(None, cert_source, gdu, "auto") == mtls_endpoint
    assert get_endpoint(None, None, gdu, "auto") == gdu_endpoint
    # "always" forces the mTLS endpoint with or without a cert source.
    assert get_endpoint(None, None, gdu, "always") == mtls_endpoint
    assert get_endpoint(None, cert_source, gdu, "always") == mtls_endpoint
    # "never" keeps the plain endpoint for any universe.
    assert get_endpoint(None, None, other_universe, "never") == other_endpoint
    assert get_endpoint(None, None, gdu, "never") == gdu_endpoint

    with pytest.raises(MutualTLSChannelError) as excinfo:
        get_endpoint(None, cert_source, other_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ServiceControllerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ServiceControllerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ServiceControllerClient._get_universe_domain(None, None) == ServiceControllerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ServiceControllerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceControllerClient, "grpc"), + (ServiceControllerAsyncClient, "grpc_asyncio"), + (ServiceControllerClient, "rest"), +]) +def test_service_controller_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicecontrol.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceControllerGrpcTransport, "grpc"), + (transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ServiceControllerRestTransport, "rest"), +]) +def test_service_controller_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceControllerClient, "grpc"), + (ServiceControllerAsyncClient, "grpc_asyncio"), + (ServiceControllerClient, "rest"), +]) +def test_service_controller_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicecontrol.googleapis.com' + ) + + +def test_service_controller_client_get_transport_class(): + transport = ServiceControllerClient.get_transport_class() + available_transports = [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerRestTransport, + ] + assert transport in available_transports + + transport = ServiceControllerClient.get_transport_class("grpc") + assert transport == transports.ServiceControllerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc"), + 
(ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest"), +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +def test_service_controller_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceControllerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceControllerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", "true"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ServiceControllerClient, 
transports.ServiceControllerGrpcTransport, "grpc", "false"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", "true"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", "false"), +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ServiceControllerClient, ServiceControllerAsyncClient +]) +@mock.patch.object(ServiceControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceControllerAsyncClient)) +def test_service_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ServiceControllerClient, ServiceControllerAsyncClient +]) +@mock.patch.object(ServiceControllerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerClient)) +@mock.patch.object(ServiceControllerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceControllerAsyncClient)) +def test_service_controller_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ServiceControllerClient._DEFAULT_UNIVERSE + default_endpoint = 
ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ServiceControllerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc"), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest"), +]) +def test_service_controller_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + # NOTE(review): the transport __init__ is stubbed out so the test can assert on the exact kwargs the client forwards. + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +# Parametrizes over each supported transport; the grpc_helpers entry is the channel-helper module the test body patches (None for REST, which creates no gRPC channel). +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", grpc_helpers), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ServiceControllerClient, transports.ServiceControllerRestTransport, "rest", None), +]) +def test_service_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided.
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_service_controller_client_client_options_from_dict(): + with mock.patch('google.cloud.servicecontrol_v2.services.service_controller.transports.ServiceControllerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ServiceControllerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport, "grpc", grpc_helpers), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_service_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "servicecontrol.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + scopes=None, + default_host="servicecontrol.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + service_controller.CheckRequest, + dict, +]) +def test_check(request_type, transport: str = 'grpc'): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as 
the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_controller.CheckResponse( + ) + response = client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service_controller.CheckRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.CheckResponse) + + +def test_check_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service_controller.CheckRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + flags='flags_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.check(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_controller.CheckRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + flags='flags_value', + ) + +def test_check_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.check] = mock_rpc + request = {} + client.check(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.check in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.check] = mock_rpc + + request = {} + await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_check_async(transport: str = 'grpc_asyncio', request_type=service_controller.CheckRequest): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse( + )) + response = await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service_controller.CheckRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.CheckResponse) + + +@pytest.mark.asyncio +async def test_check_async_from_dict(): + await test_check_async(request_type=dict) + +def test_check_field_headers(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.CheckRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = service_controller.CheckResponse() + client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_check_field_headers_async(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service_controller.CheckRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse()) + await client.check(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + service_controller.ReportRequest, + dict, +]) +def test_report(request_type, transport: str = 'grpc'): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_controller.ReportResponse( + ) + response = client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service_controller.ReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service_controller.ReportResponse) + + +def test_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service_controller.ReportRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service_controller.ReportRequest( + service_name='service_name_value', + service_config_id='service_config_id_value', + ) + +def test_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.report] = mock_rpc + request = {} + client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.report in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.report] = mock_rpc + + request = {} + await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_report_async(transport: str = 'grpc_asyncio', request_type=service_controller.ReportRequest): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse( + )) + response = await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service_controller.ReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_controller.ReportResponse) + + +@pytest.mark.asyncio +async def test_report_async_from_dict(): + await test_report_async(request_type=dict) + +def test_report_field_headers(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service_controller.ReportRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = service_controller.ReportResponse() + client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_report_field_headers_async(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = service_controller.ReportRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse()) + await client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_check_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.check] = mock_rpc + + request = {} + client.check(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.report] = mock_rpc + + # An empty dict is a valid (empty) request for this RPC. + request = {} + client.report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +# Each case below asserts that mutually exclusive ServiceControllerClient constructor arguments raise ValueError. +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceControllerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceControllerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServiceControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceControllerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + transports.ServiceControllerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ServiceControllerClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_check_empty_call_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + call.return_value = service_controller.CheckResponse() + client.check(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_report_empty_call_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + call.return_value = service_controller.ReportResponse() + client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ServiceControllerAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_check_empty_call_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.CheckResponse( + )) + await client.check(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_report_empty_call_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + # Designate an appropriate return value for the call. + # NOTE(review): FakeUnaryUnaryCall wraps the response so the mocked stub can be awaited. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_controller.ReportResponse( + )) + await client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ServiceControllerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +# Verifies the REST transport surfaces an HTTP 400 response as a client-side error (the raises assertion continues past this chunk). +def test_check_rest_bad_request(request_type=service_controller.CheckRequest): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.check(request) + + +@pytest.mark.parametrize("request_type", [ + service_controller.CheckRequest, + dict, +]) +def test_check_rest_call_success(request_type): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service_controller.CheckResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service_controller.CheckResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.check(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service_controller.CheckResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_rest_interceptors(null_interceptor): + transport = transports.ServiceControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceControllerRestInterceptor(), + ) + client = ServiceControllerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "post_check") as post, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "pre_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service_controller.CheckRequest.pb(service_controller.CheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service_controller.CheckResponse.to_json(service_controller.CheckResponse()) + req.return_value.content = return_value + + request = service_controller.CheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service_controller.CheckResponse() + + client.check(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_report_rest_bad_request(request_type=service_controller.ReportRequest): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.report(request) + + +@pytest.mark.parametrize("request_type", [ + service_controller.ReportRequest, + dict, +]) +def test_report_rest_call_success(request_type): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service_controller.ReportResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service_controller.ReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service_controller.ReportResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_report_rest_interceptors(null_interceptor): + transport = transports.ServiceControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceControllerRestInterceptor(), + ) + client = ServiceControllerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "post_report") as post, \ + mock.patch.object(transports.ServiceControllerRestInterceptor, "pre_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service_controller.ReportRequest.pb(service_controller.ReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service_controller.ReportResponse.to_json(service_controller.ReportResponse()) + req.return_value.content = return_value + + request = service_controller.ReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service_controller.ReportResponse() + + client.report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_check_empty_call_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.check), + '__call__') as call: + client.check(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.CheckRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_empty_call_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report), + '__call__') as call: + client.report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service_controller.ReportRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceControllerGrpcTransport, + ) + +def test_service_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_controller_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.servicecontrol_v2.services.service_controller.transports.ServiceControllerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'check', + 'report', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.servicecontrol_v2.services.service_controller.transports.ServiceControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id="octopus", + ) + + +def test_service_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.servicecontrol_v2.services.service_controller.transports.ServiceControllerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceControllerTransport() + adc.assert_called_once() + + +def test_service_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + ], +) +def test_service_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/servicecontrol',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceControllerGrpcTransport, + transports.ServiceControllerGrpcAsyncIOTransport, + transports.ServiceControllerRestTransport, + ], +) +def test_service_controller_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceControllerGrpcTransport, grpc_helpers), + (transports.ServiceControllerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_service_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "servicecontrol.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/servicecontrol', +), + scopes=["1", "2"], + default_host="servicecontrol.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_service_controller_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ServiceControllerRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_controller_host_no_port(transport_name): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_controller_host_with_port(transport_name): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicecontrol.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicecontrol.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicecontrol.googleapis.com:8000' + ) + 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_controller_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceControllerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceControllerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.check._session + session2 = client2.transport.check._session + assert session1 != session2 + session1 = client1.transport.report._session + session2 = client2.transport.report._session + assert session1 != session2 +def test_service_controller_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceControllerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_controller_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceControllerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.ServiceControllerGrpcTransport, transports.ServiceControllerGrpcAsyncIOTransport]) +def test_service_controller_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceControllerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ServiceControllerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceControllerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ServiceControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ServiceControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ServiceControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ServiceControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceControllerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ServiceControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ServiceControllerTransport, '_prep_wrapped_messages') as prep: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ServiceControllerTransport, '_prep_wrapped_messages') as prep: + transport_class = ServiceControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ServiceControllerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + 
async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = ServiceControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ServiceControllerClient, transports.ServiceControllerGrpcTransport), + (ServiceControllerAsyncClient, transports.ServiceControllerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-service-management/v1/.coveragerc b/owl-bot-staging/google-cloud-service-management/v1/.coveragerc new file mode 
100644 index 000000000000..e890e9afc685 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/servicemanagement/__init__.py + google/cloud/servicemanagement/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-service-management/v1/.flake8 b/owl-bot-staging/google-cloud-service-management/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-service-management/v1/MANIFEST.in b/owl-bot-staging/google-cloud-service-management/v1/MANIFEST.in new file mode 100644 index 000000000000..224a04acb727 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/servicemanagement *.py +recursive-include google/cloud/servicemanagement_v1 *.py diff --git a/owl-bot-staging/google-cloud-service-management/v1/README.rst b/owl-bot-staging/google-cloud-service-management/v1/README.rst new file mode 100644 index 000000000000..318a1e17e402 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Servicemanagement API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Servicemanagement API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-service-management/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/conf.py b/owl-bot-staging/google-cloud-service-management/v1/docs/conf.py new file mode 100644 index 000000000000..7a520dba3a59 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-service-management documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-service-management" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-service-management-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-service-management.tex", + u"google-cloud-service-management Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-service-management", + u"Google Cloud Servicemanagement Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-service-management", + u"google-cloud-service-management Documentation", + author, + "google-cloud-service-management", + "GAPIC library for Google Cloud Servicemanagement API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/index.rst b/owl-bot-staging/google-cloud-service-management/v1/docs/index.rst new file mode 100644 index 000000000000..bd12b79d8c5d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + servicemanagement_v1/services_ + servicemanagement_v1/types_ diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/service_manager.rst b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/service_manager.rst new file mode 100644 index 000000000000..54acafe01d57 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/service_manager.rst @@ -0,0 +1,10 @@ +ServiceManager +-------------------------------- + +.. 
automodule:: google.cloud.servicemanagement_v1.services.service_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.servicemanagement_v1.services.service_manager.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/services_.rst b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/services_.rst new file mode 100644 index 000000000000..1ced7cc091f6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Servicemanagement v1 API +================================================== +.. toctree:: + :maxdepth: 2 + + service_manager diff --git a/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/types_.rst b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/types_.rst new file mode 100644 index 000000000000..f53a66140e4a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/docs/servicemanagement_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Servicemanagement v1 API +=============================================== + +.. automodule:: google.cloud.servicemanagement_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/__init__.py new file mode 100644 index 000000000000..f2ed5ae44dee --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.servicemanagement import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.servicemanagement_v1.services.service_manager.client import ServiceManagerClient +from google.cloud.servicemanagement_v1.services.service_manager.async_client import ServiceManagerAsyncClient + +from google.cloud.servicemanagement_v1.types.resources import ChangeReport +from google.cloud.servicemanagement_v1.types.resources import ConfigFile +from google.cloud.servicemanagement_v1.types.resources import ConfigRef +from google.cloud.servicemanagement_v1.types.resources import ConfigSource +from google.cloud.servicemanagement_v1.types.resources import Diagnostic +from google.cloud.servicemanagement_v1.types.resources import ManagedService +from google.cloud.servicemanagement_v1.types.resources import OperationMetadata +from google.cloud.servicemanagement_v1.types.resources import Rollout +from google.cloud.servicemanagement_v1.types.servicemanager import CreateServiceConfigRequest +from google.cloud.servicemanagement_v1.types.servicemanager import CreateServiceRequest +from google.cloud.servicemanagement_v1.types.servicemanager import CreateServiceRolloutRequest +from google.cloud.servicemanagement_v1.types.servicemanager import DeleteServiceRequest +from google.cloud.servicemanagement_v1.types.servicemanager import EnableServiceResponse +from google.cloud.servicemanagement_v1.types.servicemanager import GenerateConfigReportRequest +from google.cloud.servicemanagement_v1.types.servicemanager import 
GenerateConfigReportResponse +from google.cloud.servicemanagement_v1.types.servicemanager import GetServiceConfigRequest +from google.cloud.servicemanagement_v1.types.servicemanager import GetServiceRequest +from google.cloud.servicemanagement_v1.types.servicemanager import GetServiceRolloutRequest +from google.cloud.servicemanagement_v1.types.servicemanager import ListServiceConfigsRequest +from google.cloud.servicemanagement_v1.types.servicemanager import ListServiceConfigsResponse +from google.cloud.servicemanagement_v1.types.servicemanager import ListServiceRolloutsRequest +from google.cloud.servicemanagement_v1.types.servicemanager import ListServiceRolloutsResponse +from google.cloud.servicemanagement_v1.types.servicemanager import ListServicesRequest +from google.cloud.servicemanagement_v1.types.servicemanager import ListServicesResponse +from google.cloud.servicemanagement_v1.types.servicemanager import SubmitConfigSourceRequest +from google.cloud.servicemanagement_v1.types.servicemanager import SubmitConfigSourceResponse +from google.cloud.servicemanagement_v1.types.servicemanager import UndeleteServiceRequest +from google.cloud.servicemanagement_v1.types.servicemanager import UndeleteServiceResponse + +__all__ = ('ServiceManagerClient', + 'ServiceManagerAsyncClient', + 'ChangeReport', + 'ConfigFile', + 'ConfigRef', + 'ConfigSource', + 'Diagnostic', + 'ManagedService', + 'OperationMetadata', + 'Rollout', + 'CreateServiceConfigRequest', + 'CreateServiceRequest', + 'CreateServiceRolloutRequest', + 'DeleteServiceRequest', + 'EnableServiceResponse', + 'GenerateConfigReportRequest', + 'GenerateConfigReportResponse', + 'GetServiceConfigRequest', + 'GetServiceRequest', + 'GetServiceRolloutRequest', + 'ListServiceConfigsRequest', + 'ListServiceConfigsResponse', + 'ListServiceRolloutsRequest', + 'ListServiceRolloutsResponse', + 'ListServicesRequest', + 'ListServicesResponse', + 'SubmitConfigSourceRequest', + 'SubmitConfigSourceResponse', + 'UndeleteServiceRequest', 
+ 'UndeleteServiceResponse', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/gapic_version.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/py.typed b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/py.typed new file mode 100644 index 000000000000..70f6a7b346b6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-management package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/__init__.py new file mode 100644 index 000000000000..a5870bfa608b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.servicemanagement_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.service_manager import ServiceManagerClient +from .services.service_manager import ServiceManagerAsyncClient + +from .types.resources import ChangeReport +from .types.resources import ConfigFile +from .types.resources import ConfigRef +from .types.resources import ConfigSource +from .types.resources import Diagnostic +from .types.resources import ManagedService +from .types.resources import OperationMetadata +from .types.resources import Rollout +from .types.servicemanager import CreateServiceConfigRequest +from .types.servicemanager import CreateServiceRequest +from .types.servicemanager import CreateServiceRolloutRequest +from .types.servicemanager import DeleteServiceRequest +from .types.servicemanager import EnableServiceResponse +from .types.servicemanager import GenerateConfigReportRequest +from .types.servicemanager import GenerateConfigReportResponse +from .types.servicemanager import GetServiceConfigRequest +from .types.servicemanager import GetServiceRequest +from .types.servicemanager import GetServiceRolloutRequest +from .types.servicemanager import ListServiceConfigsRequest +from .types.servicemanager import ListServiceConfigsResponse +from .types.servicemanager import ListServiceRolloutsRequest +from .types.servicemanager import ListServiceRolloutsResponse +from .types.servicemanager import ListServicesRequest +from .types.servicemanager import ListServicesResponse +from .types.servicemanager import SubmitConfigSourceRequest +from .types.servicemanager import SubmitConfigSourceResponse +from .types.servicemanager import UndeleteServiceRequest +from .types.servicemanager import UndeleteServiceResponse + +__all__ = ( + 'ServiceManagerAsyncClient', +'ChangeReport', +'ConfigFile', +'ConfigRef', +'ConfigSource', +'CreateServiceConfigRequest', +'CreateServiceRequest', +'CreateServiceRolloutRequest', 
+'DeleteServiceRequest', +'Diagnostic', +'EnableServiceResponse', +'GenerateConfigReportRequest', +'GenerateConfigReportResponse', +'GetServiceConfigRequest', +'GetServiceRequest', +'GetServiceRolloutRequest', +'ListServiceConfigsRequest', +'ListServiceConfigsResponse', +'ListServiceRolloutsRequest', +'ListServiceRolloutsResponse', +'ListServicesRequest', +'ListServicesResponse', +'ManagedService', +'OperationMetadata', +'Rollout', +'ServiceManagerClient', +'SubmitConfigSourceRequest', +'SubmitConfigSourceResponse', +'UndeleteServiceRequest', +'UndeleteServiceResponse', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_metadata.json new file mode 100644 index 000000000000..8d71fcd6a7d6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_metadata.json @@ -0,0 +1,223 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.servicemanagement_v1", + "protoPackage": "google.api.servicemanagement.v1", + "schema": "1.0", + "services": { + "ServiceManager": { + "clients": { + "grpc": { + "libraryClient": "ServiceManagerClient", + "rpcs": { + "CreateService": { + "methods": [ + "create_service" + ] + }, + "CreateServiceConfig": { + "methods": [ + "create_service_config" + ] + }, + "CreateServiceRollout": { + "methods": [ + "create_service_rollout" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "GenerateConfigReport": { + "methods": [ + "generate_config_report" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "GetServiceConfig": { + "methods": [ + "get_service_config" + ] + }, + "GetServiceRollout": { + "methods": [ + "get_service_rollout" + ] + }, + "ListServiceConfigs": { + "methods": [ + 
"list_service_configs" + ] + }, + "ListServiceRollouts": { + "methods": [ + "list_service_rollouts" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "SubmitConfigSource": { + "methods": [ + "submit_config_source" + ] + }, + "UndeleteService": { + "methods": [ + "undelete_service" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceManagerAsyncClient", + "rpcs": { + "CreateService": { + "methods": [ + "create_service" + ] + }, + "CreateServiceConfig": { + "methods": [ + "create_service_config" + ] + }, + "CreateServiceRollout": { + "methods": [ + "create_service_rollout" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "GenerateConfigReport": { + "methods": [ + "generate_config_report" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "GetServiceConfig": { + "methods": [ + "get_service_config" + ] + }, + "GetServiceRollout": { + "methods": [ + "get_service_rollout" + ] + }, + "ListServiceConfigs": { + "methods": [ + "list_service_configs" + ] + }, + "ListServiceRollouts": { + "methods": [ + "list_service_rollouts" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "SubmitConfigSource": { + "methods": [ + "submit_config_source" + ] + }, + "UndeleteService": { + "methods": [ + "undelete_service" + ] + } + } + }, + "rest": { + "libraryClient": "ServiceManagerClient", + "rpcs": { + "CreateService": { + "methods": [ + "create_service" + ] + }, + "CreateServiceConfig": { + "methods": [ + "create_service_config" + ] + }, + "CreateServiceRollout": { + "methods": [ + "create_service_rollout" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "GenerateConfigReport": { + "methods": [ + "generate_config_report" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "GetServiceConfig": { + "methods": [ + "get_service_config" + ] + }, + "GetServiceRollout": { + "methods": [ + "get_service_rollout" + ] + }, + "ListServiceConfigs": { + 
"methods": [ + "list_service_configs" + ] + }, + "ListServiceRollouts": { + "methods": [ + "list_service_rollouts" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "SubmitConfigSource": { + "methods": [ + "submit_config_source" + ] + }, + "UndeleteService": { + "methods": [ + "undelete_service" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_version.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/py.typed b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/py.typed new file mode 100644 index 000000000000..70f6a7b346b6 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-management package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/__init__.py new file mode 100644 index 000000000000..9cfa5ac8fc74 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ServiceManagerClient +from .async_client import ServiceManagerAsyncClient + +__all__ = ( + 'ServiceManagerClient', + 'ServiceManagerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/async_client.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/async_client.py new file mode 100644 index 000000000000..2862ab757ef1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/async_client.py @@ -0,0 +1,2332 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.servicemanagement_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api import auth_pb2 # type: ignore +from google.api import backend_pb2 # type: ignore +from google.api import billing_pb2 # type: ignore +from google.api import client_pb2 # type: ignore +from google.api import context_pb2 # type: ignore +from google.api import control_pb2 # type: ignore +from google.api import documentation_pb2 # type: ignore +from google.api import endpoint_pb2 # type: ignore +from google.api import http_pb2 # type: ignore +from google.api import log_pb2 # type: ignore +from google.api import logging_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.api import monitoring_pb2 # type: ignore +from google.api import quota_pb2 # type: ignore +from google.api import service_pb2 # type: ignore +from google.api import source_info_pb2 # type: ignore +from google.api import system_parameter_pb2 # type: ignore +from google.api import usage_pb2 # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.servicemanagement_v1.services.service_manager import pagers +from 
google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import api_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import type_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from .transports.base import ServiceManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ServiceManagerGrpcAsyncIOTransport +from .client import ServiceManagerClient + + +class ServiceManagerAsyncClient: + """`Google Service Management + API `__ + """ + + _client: ServiceManagerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ServiceManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceManagerClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ServiceManagerClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod(ServiceManagerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ServiceManagerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ServiceManagerClient.common_folder_path) + parse_common_folder_path = staticmethod(ServiceManagerClient.parse_common_folder_path) + common_organization_path = staticmethod(ServiceManagerClient.common_organization_path) + parse_common_organization_path = staticmethod(ServiceManagerClient.parse_common_organization_path) + common_project_path = staticmethod(ServiceManagerClient.common_project_path) + parse_common_project_path = staticmethod(ServiceManagerClient.parse_common_project_path) + common_location_path = staticmethod(ServiceManagerClient.common_location_path) + parse_common_location_path = staticmethod(ServiceManagerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceManagerAsyncClient: The constructed client. + """ + return ServiceManagerClient.from_service_account_info.__func__(ServiceManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceManagerAsyncClient: The constructed client. + """ + return ServiceManagerClient.from_service_account_file.__func__(ServiceManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ServiceManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServiceManagerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ServiceManagerClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceManagerTransport, Callable[..., ServiceManagerTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service manager async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceManagerTransport,Callable[..., ServiceManagerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceManagerTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServiceManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_services(self, + request: Optional[Union[servicemanager.ListServicesRequest, dict]] = None, + *, + producer_project_id: Optional[str] = None, + consumer_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesAsyncPager: + r"""Lists managed services. + + Returns all public services. For authenticated users, + also returns all services the calling user has + "servicemanagement.services.get" permission for. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_list_services(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.ListServicesRequest, dict]]): + The request object. Request message for ``ListServices`` method. + producer_project_id (:class:`str`): + Include services produced by the + specified project. + + This corresponds to the ``producer_project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ consumer_id (:class:`str`): + Include services consumed by the specified consumer. + + The Google Service Management implementation accepts the + following forms: + + - project: + + This corresponds to the ``consumer_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServicesAsyncPager: + Response message for ListServices method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([producer_project_id, consumer_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.ListServicesRequest): + request = servicemanager.ListServicesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if producer_project_id is not None: + request.producer_project_id = producer_project_id + if consumer_id is not None: + request.consumer_id = consumer_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_services] + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServicesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service(self, + request: Optional[Union[servicemanager.GetServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ManagedService: + r"""Gets a managed service. Authentication is required + unless the service is public. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_get_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRequest( + service_name="service_name_value", + ) + + # Make the request + response = await client.get_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.GetServiceRequest, dict]]): + The request object. Request message for ``GetService`` method. + service_name (:class:`str`): + Required. The name of the service. 
See the + ``ServiceManager`` overview for naming requirements. For + example: ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.ManagedService: + The full representation of a Service + that is managed by Google Service + Management. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceRequest): + request = servicemanager.GetServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_service(self, + request: Optional[Union[servicemanager.CreateServiceRequest, dict]] = None, + *, + service: Optional[resources.ManagedService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new managed service. + + A managed service is immutable, and is subject to + mandatory 30-day data retention. You cannot move a + service or recreate it within 30 days after deletion. + + One producer project can own no more than 500 services. + For security and reliability purposes, a production + service should be hosted in a dedicated producer + project. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_create_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRequest( + ) + + # Make the request + operation = client.create_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.CreateServiceRequest, dict]]): + The request object. Request message for CreateService + method. 
+ service (:class:`google.cloud.servicemanagement_v1.types.ManagedService`): + Required. Initial values for the + service resource. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.servicemanagement_v1.types.ManagedService` The full representation of a Service that is managed by + Google Service Management. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceRequest): + request = servicemanager.CreateServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_service] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.ManagedService, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_service(self, + request: Optional[Union[servicemanager.DeleteServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a managed service. This method will change the service + to the ``Soft-Delete`` state for 30 days. Within this period, + service producers may call + [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + to restore the service. After 30 days, the service will be + permanently deleted. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_delete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.DeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.delete_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.DeleteServiceRequest, dict]]): + The request object. Request message for DeleteService + method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.DeleteServiceRequest): + request = servicemanager.DeleteServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def undelete_service(self, + request: Optional[Union[servicemanager.UndeleteServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Revives a previously deleted managed service. The + method restores the service using the configuration at + the time the service was deleted. The target service + must exist and must have been deleted within the last 30 + days. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_undelete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.UndeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.undelete_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.UndeleteServiceRequest, dict]]): + The request object. Request message for UndeleteService + method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. 
+ + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.servicemanagement_v1.types.UndeleteServiceResponse` + Response message for UndeleteService method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.UndeleteServiceRequest): + request = servicemanager.UndeleteServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.undelete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + servicemanager.UndeleteServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_service_configs(self, + request: Optional[Union[servicemanager.ListServiceConfigsRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceConfigsAsyncPager: + r"""Lists the history of the service configuration for a + managed service, from the newest to the oldest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_list_service_configs(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceConfigsRequest( + service_name="service_name_value", + ) + + # Make the request + page_result = client.list_service_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest, dict]]): + The request object. 
Request message for + ListServiceConfigs method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceConfigsAsyncPager: + Response message for + ListServiceConfigs method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.ListServiceConfigsRequest): + request = servicemanager.ListServiceConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_service_configs] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServiceConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service_config(self, + request: Optional[Union[servicemanager.GetServiceConfigRequest, dict]] = None, + *, + service_name: Optional[str] = None, + config_id: Optional[str] = None, + view: Optional[servicemanager.GetServiceConfigRequest.ConfigView] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_pb2.Service: + r"""Gets a service configuration (version) for a managed + service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_get_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceConfigRequest( + service_name="service_name_value", + config_id="config_id_value", + ) + + # Make the request + response = await client.get_service_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.GetServiceConfigRequest, dict]]): + The request object. Request message for GetServiceConfig + method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config_id (:class:`str`): + Required. The id of the service configuration resource. + + This field must be specified for the server to return + all fields, including ``SourceInfo``. + + This corresponds to the ``config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + view (:class:`google.cloud.servicemanagement_v1.types.GetServiceConfigRequest.ConfigView`): + Specifies which parts of the Service + Config should be returned in the + response. + + This corresponds to the ``view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.service_pb2.Service: + Service is the root object of Google API service configuration (service + config). It describes the basic information about a + logical service, such as the service name and the + user-facing title, and delegates other aspects to + sub-sections. Each sub-section is either a proto + message or a repeated proto message that configures a + specific aspect, such as auth. For more information, + see each proto message definition. + + Example: + + type: google.api.Service name: + calendar.googleapis.com title: Google Calendar API + apis: - name: google.calendar.v3.Calendar + + visibility: + rules: - selector: "google.calendar.v3.*" + restriction: PREVIEW + + backend: + rules: - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: - id: google_calendar_auth jwks_uri: + https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com rules: - + selector: "*" requirements: provider_id: + google_calendar_auth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, config_id, view]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceConfigRequest): + request = servicemanager.GetServiceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if service_name is not None: + request.service_name = service_name + if config_id is not None: + request.config_id = config_id + if view is not None: + request.view = view + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_service_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + ("config_id", request.config_id), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_service_config(self, + request: Optional[Union[servicemanager.CreateServiceConfigRequest, dict]] = None, + *, + service_name: Optional[str] = None, + service_config: Optional[service_pb2.Service] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_pb2.Service: + r"""Creates a new service configuration (version) for a managed + service. This method only stores the service configuration. To + roll out the service configuration to backend systems please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent service configurations and ones + referenced by existing rollouts are kept for each service. The + rest will be deleted eventually. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_create_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceConfigRequest( + service_name="service_name_value", + ) + + # Make the request + response = await client.create_service_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.CreateServiceConfigRequest, dict]]): + The request object. Request message for + CreateServiceConfig method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_config (:class:`google.api.service_pb2.Service`): + Required. The service configuration + resource. + + This corresponds to the ``service_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.service_pb2.Service: + Service is the root object of Google API service configuration (service + config). It describes the basic information about a + logical service, such as the service name and the + user-facing title, and delegates other aspects to + sub-sections. 
Each sub-section is either a proto + message or a repeated proto message that configures a + specific aspect, such as auth. For more information, + see each proto message definition. + + Example: + + type: google.api.Service name: + calendar.googleapis.com title: Google Calendar API + apis: - name: google.calendar.v3.Calendar + + visibility: + rules: - selector: "google.calendar.v3.*" + restriction: PREVIEW + + backend: + rules: - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: - id: google_calendar_auth jwks_uri: + https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com rules: - + selector: "*" requirements: provider_id: + google_calendar_auth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, service_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceConfigRequest): + request = servicemanager.CreateServiceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if service_config is not None: + request.service_config = service_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_service_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def submit_config_source(self, + request: Optional[Union[servicemanager.SubmitConfigSourceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + config_source: Optional[resources.ConfigSource] = None, + validate_only: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new service configuration (version) for a managed + service based on user-supplied configuration source files (for + example: OpenAPI Specification). This method stores the source + configurations as well as the generated service configuration. + To rollout the service configuration to other services, please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent configuration sources and ones + referenced by existing service configurations are kept for each + service. The rest will be deleted eventually. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_submit_config_source(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.SubmitConfigSourceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.submit_config_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.SubmitConfigSourceRequest, dict]]): + The request object. Request message for + SubmitConfigSource method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config_source (:class:`google.cloud.servicemanagement_v1.types.ConfigSource`): + Required. The source configuration + for the service. + + This corresponds to the ``config_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + validate_only (:class:`bool`): + Optional. If set, this will result in the generation of + a ``google.api.Service`` configuration based on the + ``ConfigSource`` provided, but the generated config and + the sources will NOT be persisted. + + This corresponds to the ``validate_only`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.servicemanagement_v1.types.SubmitConfigSourceResponse` + Response message for SubmitConfigSource method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, config_source, validate_only]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.SubmitConfigSourceRequest): + request = servicemanager.SubmitConfigSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if config_source is not None: + request.config_source = config_source + if validate_only is not None: + request.validate_only = validate_only + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.submit_config_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + servicemanager.SubmitConfigSourceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_service_rollouts(self, + request: Optional[Union[servicemanager.ListServiceRolloutsRequest, dict]] = None, + *, + service_name: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceRolloutsAsyncPager: + r"""Lists the history of the service configuration + rollouts for a managed service, from the newest to the + oldest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_list_service_rollouts(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceRolloutsRequest( + service_name="service_name_value", + filter="filter_value", + ) + + # Make the request + page_result = client.list_service_rollouts(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest, dict]]): + The request object. 
Request message for + 'ListServiceRollouts' + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Required. Use ``filter`` to return subset of rollouts. + The following filters are supported: + + -- By [status] + [google.api.servicemanagement.v1.Rollout.RolloutStatus]. + For example, ``filter='status=SUCCESS'`` + + -- By [strategy] + [google.api.servicemanagement.v1.Rollout.strategy]. For + example, ``filter='strategy=TrafficPercentStrategy'`` + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceRolloutsAsyncPager: + Response message for + ListServiceRollouts method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, servicemanager.ListServiceRolloutsRequest): + request = servicemanager.ListServiceRolloutsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_service_rollouts] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServiceRolloutsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service_rollout(self, + request: Optional[Union[servicemanager.GetServiceRolloutRequest, dict]] = None, + *, + service_name: Optional[str] = None, + rollout_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Rollout: + r"""Gets a service configuration + [rollout][google.api.servicemanagement.v1.Rollout]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_get_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRolloutRequest( + service_name="service_name_value", + rollout_id="rollout_id_value", + ) + + # Make the request + response = await client.get_service_rollout(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.GetServiceRolloutRequest, dict]]): + The request object. Request message for GetServiceRollout + method. + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rollout_id (:class:`str`): + Required. The id of the rollout + resource. + + This corresponds to the ``rollout_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.Rollout: + A rollout resource that defines how + service configuration versions are + pushed to control plane systems. 
+ Typically, you create a new version of + the service config, and then create a + Rollout to push the service config. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, rollout_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceRolloutRequest): + request = servicemanager.GetServiceRolloutRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if rollout_id is not None: + request.rollout_id = rollout_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_service_rollout] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + ("rollout_id", request.rollout_id), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_service_rollout(self, + request: Optional[Union[servicemanager.CreateServiceRolloutRequest, dict]] = None, + *, + service_name: Optional[str] = None, + rollout: Optional[resources.Rollout] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new service configuration rollout. Based on + rollout, the Google Service Management will roll out the + service configurations to different backend services. + For example, the logging configuration will be pushed to + Google Cloud Logging. + + Please note that any previous pending and running + Rollouts and associated Operations will be automatically + cancelled so that the latest Rollout will not be blocked + by previous Rollouts. + + Only the 100 most recent (in any state) and the last 10 + successful (if not already part of the set of 100 most + recent) rollouts are kept for each service. The rest + will be deleted eventually. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_create_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRolloutRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.create_service_rollout(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.CreateServiceRolloutRequest, dict]]): + The request object. Request message for + 'CreateServiceRollout' + service_name (:class:`str`): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rollout (:class:`google.cloud.servicemanagement_v1.types.Rollout`): + Required. The rollout resource. The ``service_name`` + field is output only. + + This corresponds to the ``rollout`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.servicemanagement_v1.types.Rollout` A rollout resource that defines how service configuration versions are pushed + to control plane systems. Typically, you create a new + version of the service config, and then create a + Rollout to push the service config. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, rollout]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceRolloutRequest): + request = servicemanager.CreateServiceRolloutRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if rollout is not None: + request.rollout = rollout + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_service_rollout] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Rollout, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def generate_config_report(self, + request: Optional[Union[servicemanager.GenerateConfigReportRequest, dict]] = None, + *, + new_config: Optional[any_pb2.Any] = None, + old_config: Optional[any_pb2.Any] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> servicemanager.GenerateConfigReportResponse: + r"""Generates and returns a report (errors, warnings and changes + from existing configurations) associated with + GenerateConfigReportRequest.new_value + + If GenerateConfigReportRequest.old_value is specified, + GenerateConfigReportRequest will contain a single ChangeReport + based on the comparison between + GenerateConfigReportRequest.new_value and + GenerateConfigReportRequest.old_value. If + GenerateConfigReportRequest.old_value is not specified, this + method will compare GenerateConfigReportRequest.new_value with + the last pushed service configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + async def sample_generate_config_report(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GenerateConfigReportRequest( + ) + + # Make the request + response = await client.generate_config_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicemanagement_v1.types.GenerateConfigReportRequest, dict]]): + The request object. Request message for + GenerateConfigReport method. + new_config (:class:`google.protobuf.any_pb2.Any`): + Required. Service configuration for which we want to + generate the report. For this version of API, the + supported types are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + + This corresponds to the ``new_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + old_config (:class:`google.protobuf.any_pb2.Any`): + Optional. Service configuration against which the + comparison will be done. For this version of API, the + supported types are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + + This corresponds to the ``old_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.GenerateConfigReportResponse: + Response message for + GenerateConfigReport method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([new_config, old_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GenerateConfigReportRequest): + request = servicemanager.GenerateConfigReportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if new_config is not None: + request.new_config = new_config + if old_config is not None: + request.old_config = old_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_config_report] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ServiceManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceManagerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/client.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/client.py new file mode 100644 index 000000000000..31be6c4b543b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/client.py @@ -0,0 +1,2637 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.servicemanagement_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api import auth_pb2 # type: ignore +from google.api import backend_pb2 # type: ignore +from google.api import billing_pb2 # type: ignore +from google.api import client_pb2 # type: ignore +from google.api import context_pb2 # type: ignore +from google.api import control_pb2 # type: ignore +from google.api import documentation_pb2 # type: ignore +from google.api import endpoint_pb2 # type: ignore +from google.api import http_pb2 # type: ignore +from google.api import log_pb2 # type: ignore +from google.api import logging_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.api import monitoring_pb2 # type: ignore +from google.api import quota_pb2 # type: ignore +from google.api import service_pb2 # type: ignore +from google.api import source_info_pb2 # type: ignore +from google.api import system_parameter_pb2 # type: ignore +from google.api import usage_pb2 # type: 
ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.servicemanagement_v1.services.service_manager import pagers +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import api_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import type_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from .transports.base import ServiceManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ServiceManagerGrpcTransport +from .transports.grpc_asyncio import ServiceManagerGrpcAsyncIOTransport +from .transports.rest import ServiceManagerRestTransport + + +class ServiceManagerClientMeta(type): + """Metaclass for the ServiceManager client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceManagerTransport]] + _transport_registry["grpc"] = ServiceManagerGrpcTransport + _transport_registry["grpc_asyncio"] = ServiceManagerGrpcAsyncIOTransport + _transport_registry["rest"] = ServiceManagerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ServiceManagerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceManagerClient(metaclass=ServiceManagerClientMeta): + """`Google Service Management + API <https://cloud.google.com/service-infrastructure/docs/overview/>`__ + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "servicemanagement.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "servicemanagement.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceManagerClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceManagerTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceManagerTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ServiceManagerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ServiceManagerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. 
+ + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ServiceManagerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceManagerTransport, Callable[..., ServiceManagerTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,ServiceManagerTransport,Callable[..., ServiceManagerTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceManagerTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ServiceManagerClient._read_environment_variables() + self._client_cert_source = ServiceManagerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = ServiceManagerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ServiceManagerTransport) + if transport_provided: + # transport is a ServiceManagerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ServiceManagerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ServiceManagerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ServiceManagerTransport], Callable[..., ServiceManagerTransport]] = ( + ServiceManagerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ServiceManagerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_services(self, + request: Optional[Union[servicemanager.ListServicesRequest, dict]] = None, + *, + producer_project_id: Optional[str] = None, + consumer_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesPager: + r"""Lists managed services. + + Returns all public services. For authenticated users, + also returns all services the calling user has + "servicemanagement.services.get" permission for. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_list_services(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.ListServicesRequest, dict]): + The request object. Request message for ``ListServices`` method. + producer_project_id (str): + Include services produced by the + specified project. + + This corresponds to the ``producer_project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + consumer_id (str): + Include services consumed by the specified consumer. + + The Google Service Management implementation accepts the + following forms: + + - project: + + This corresponds to the ``consumer_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServicesPager: + Response message for ListServices method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([producer_project_id, consumer_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.ListServicesRequest): + request = servicemanager.ListServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if producer_project_id is not None: + request.producer_project_id = producer_project_id + if consumer_id is not None: + request.consumer_id = consumer_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_services] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServicesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service(self, + request: Optional[Union[servicemanager.GetServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ManagedService: + r"""Gets a managed service. Authentication is required + unless the service is public. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_get_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRequest( + service_name="service_name_value", + ) + + # Make the request + response = client.get_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.GetServiceRequest, dict]): + The request object. Request message for ``GetService`` method. + service_name (str): + Required. The name of the service. See the + ``ServiceManager`` overview for naming requirements. For + example: ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.ManagedService: + The full representation of a Service + that is managed by Google Service + Management. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceRequest): + request = servicemanager.GetServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_service(self, + request: Optional[Union[servicemanager.CreateServiceRequest, dict]] = None, + *, + service: Optional[resources.ManagedService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new managed service. + + A managed service is immutable, and is subject to + mandatory 30-day data retention. You cannot move a + service or recreate it within 30 days after deletion. + + One producer project can own no more than 500 services. + For security and reliability purposes, a production + service should be hosted in a dedicated producer + project. + + Operation + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_create_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRequest( + ) + + # Make the request + operation = client.create_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.CreateServiceRequest, dict]): + The request object. Request message for CreateService + method. + service (google.cloud.servicemanagement_v1.types.ManagedService): + Required. Initial values for the + service resource. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.servicemanagement_v1.types.ManagedService` The full representation of a Service that is managed by + Google Service Management. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceRequest): + request = servicemanager.CreateServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_service] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.ManagedService, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_service(self, + request: Optional[Union[servicemanager.DeleteServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a managed service. This method will change the service + to the ``Soft-Delete`` state for 30 days. Within this period, + service producers may call + [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + to restore the service. 
After 30 days, the service will be + permanently deleted. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_delete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.DeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.delete_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.DeleteServiceRequest, dict]): + The request object. Request message for DeleteService + method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.DeleteServiceRequest): + request = servicemanager.DeleteServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def undelete_service(self, + request: Optional[Union[servicemanager.UndeleteServiceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Revives a previously deleted managed service. The + method restores the service using the configuration at + the time the service was deleted. The target service + must exist and must have been deleted within the last 30 + days. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_undelete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.UndeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.undelete_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.UndeleteServiceRequest, dict]): + The request object. Request message for UndeleteService + method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.servicemanagement_v1.types.UndeleteServiceResponse` + Response message for UndeleteService method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.UndeleteServiceRequest): + request = servicemanager.UndeleteServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + servicemanager.UndeleteServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_service_configs(self, + request: Optional[Union[servicemanager.ListServiceConfigsRequest, dict]] = None, + *, + service_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceConfigsPager: + r"""Lists the history of the service configuration for a + managed service, from the newest to the oldest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_list_service_configs(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceConfigsRequest( + service_name="service_name_value", + ) + + # Make the request + page_result = client.list_service_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest, dict]): + The request object. Request message for + ListServiceConfigs method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. 
+ + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceConfigsPager: + Response message for + ListServiceConfigs method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.ListServiceConfigsRequest): + request = servicemanager.ListServiceConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_service_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServiceConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service_config(self, + request: Optional[Union[servicemanager.GetServiceConfigRequest, dict]] = None, + *, + service_name: Optional[str] = None, + config_id: Optional[str] = None, + view: Optional[servicemanager.GetServiceConfigRequest.ConfigView] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_pb2.Service: + r"""Gets a service configuration (version) for a managed + service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_get_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceConfigRequest( + service_name="service_name_value", + config_id="config_id_value", + ) + + # Make the request + response = client.get_service_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.GetServiceConfigRequest, dict]): + The request object. Request message for GetServiceConfig + method. 
+ service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config_id (str): + Required. The id of the service configuration resource. + + This field must be specified for the server to return + all fields, including ``SourceInfo``. + + This corresponds to the ``config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + view (google.cloud.servicemanagement_v1.types.GetServiceConfigRequest.ConfigView): + Specifies which parts of the Service + Config should be returned in the + response. + + This corresponds to the ``view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.service_pb2.Service: + Service is the root object of Google API service configuration (service + config). It describes the basic information about a + logical service, such as the service name and the + user-facing title, and delegates other aspects to + sub-sections. Each sub-section is either a proto + message or a repeated proto message that configures a + specific aspect, such as auth. For more information, + see each proto message definition. 
+ + Example: + + type: google.api.Service name: + calendar.googleapis.com title: Google Calendar API + apis: - name: google.calendar.v3.Calendar + + visibility: + rules: - selector: "google.calendar.v3.*" + restriction: PREVIEW + + backend: + rules: - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: - id: google_calendar_auth jwks_uri: + https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com rules: - + selector: "*" requirements: provider_id: + google_calendar_auth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, config_id, view]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceConfigRequest): + request = servicemanager.GetServiceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if config_id is not None: + request.config_id = config_id + if view is not None: + request.view = view + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + ("config_id", request.config_id), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_service_config(self, + request: Optional[Union[servicemanager.CreateServiceConfigRequest, dict]] = None, + *, + service_name: Optional[str] = None, + service_config: Optional[service_pb2.Service] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service_pb2.Service: + r"""Creates a new service configuration (version) for a managed + service. This method only stores the service configuration. To + roll out the service configuration to backend systems please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent service configurations and ones + referenced by existing rollouts are kept for each service. The + rest will be deleted eventually. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_create_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceConfigRequest( + service_name="service_name_value", + ) + + # Make the request + response = client.create_service_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.CreateServiceConfigRequest, dict]): + The request object. Request message for + CreateServiceConfig method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_config (google.api.service_pb2.Service): + Required. The service configuration + resource. + + This corresponds to the ``service_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.service_pb2.Service: + Service is the root object of Google API service configuration (service + config). It describes the basic information about a + logical service, such as the service name and the + user-facing title, and delegates other aspects to + sub-sections. Each sub-section is either a proto + message or a repeated proto message that configures a + specific aspect, such as auth. 
For more information, + see each proto message definition. + + Example: + + type: google.api.Service name: + calendar.googleapis.com title: Google Calendar API + apis: - name: google.calendar.v3.Calendar + + visibility: + rules: - selector: "google.calendar.v3.*" + restriction: PREVIEW + + backend: + rules: - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: - id: google_calendar_auth jwks_uri: + https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com rules: - + selector: "*" requirements: provider_id: + google_calendar_auth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, service_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceConfigRequest): + request = servicemanager.CreateServiceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if service_config is not None: + request.service_config = service_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_service_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def submit_config_source(self, + request: Optional[Union[servicemanager.SubmitConfigSourceRequest, dict]] = None, + *, + service_name: Optional[str] = None, + config_source: Optional[resources.ConfigSource] = None, + validate_only: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new service configuration (version) for a managed + service based on user-supplied configuration source files (for + example: OpenAPI Specification). This method stores the source + configurations as well as the generated service configuration. + To rollout the service configuration to other services, please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent configuration sources and ones + referenced by existing service configurtions are kept for each + service. The rest will be deleted eventually. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_submit_config_source(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.SubmitConfigSourceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.submit_config_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.SubmitConfigSourceRequest, dict]): + The request object. Request message for + SubmitConfigSource method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config_source (google.cloud.servicemanagement_v1.types.ConfigSource): + Required. The source configuration + for the service. + + This corresponds to the ``config_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + validate_only (bool): + Optional. If set, this will result in the generation of + a ``google.api.Service`` configuration based on the + ``ConfigSource`` provided, but the generated config and + the sources will NOT be persisted. + + This corresponds to the ``validate_only`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.servicemanagement_v1.types.SubmitConfigSourceResponse` + Response message for SubmitConfigSource method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, config_source, validate_only]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.SubmitConfigSourceRequest): + request = servicemanager.SubmitConfigSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if config_source is not None: + request.config_source = config_source + if validate_only is not None: + request.validate_only = validate_only + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_config_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + servicemanager.SubmitConfigSourceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_service_rollouts(self, + request: Optional[Union[servicemanager.ListServiceRolloutsRequest, dict]] = None, + *, + service_name: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServiceRolloutsPager: + r"""Lists the history of the service configuration + rollouts for a managed service, from the newest to the + oldest. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_list_service_rollouts(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceRolloutsRequest( + service_name="service_name_value", + filter="filter_value", + ) + + # Make the request + page_result = client.list_service_rollouts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest, dict]): + The request object. Request message for + 'ListServiceRollouts' + service_name (str): + Required. 
The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Required. Use ``filter`` to return subset of rollouts. + The following filters are supported: + + -- By [status] + [google.api.servicemanagement.v1.Rollout.RolloutStatus]. + For example, ``filter='status=SUCCESS'`` + + -- By [strategy] + [google.api.servicemanagement.v1.Rollout.strategy]. For + example, ``filter='strategy=TrafficPercentStrategy'`` + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceRolloutsPager: + Response message for + ListServiceRollouts method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, servicemanager.ListServiceRolloutsRequest): + request = servicemanager.ListServiceRolloutsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_service_rollouts] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServiceRolloutsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service_rollout(self, + request: Optional[Union[servicemanager.GetServiceRolloutRequest, dict]] = None, + *, + service_name: Optional[str] = None, + rollout_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Rollout: + r"""Gets a service configuration + [rollout][google.api.servicemanagement.v1.Rollout]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_get_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRolloutRequest( + service_name="service_name_value", + rollout_id="rollout_id_value", + ) + + # Make the request + response = client.get_service_rollout(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.GetServiceRolloutRequest, dict]): + The request object. Request message for GetServiceRollout + method. + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rollout_id (str): + Required. The id of the rollout + resource. + + This corresponds to the ``rollout_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.Rollout: + A rollout resource that defines how + service configuration versions are + pushed to control plane systems. + Typically, you create a new version of + the service config, and then create a + Rollout to push the service config. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, rollout_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GetServiceRolloutRequest): + request = servicemanager.GetServiceRolloutRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service_name is not None: + request.service_name = service_name + if rollout_id is not None: + request.rollout_id = rollout_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service_rollout] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + ("rollout_id", request.rollout_id), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_service_rollout(self, + request: Optional[Union[servicemanager.CreateServiceRolloutRequest, dict]] = None, + *, + service_name: Optional[str] = None, + rollout: Optional[resources.Rollout] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new service configuration rollout. 
Based on + rollout, the Google Service Management will roll out the + service configurations to different backend services. + For example, the logging configuration will be pushed to + Google Cloud Logging. + + Please note that any previous pending and running + Rollouts and associated Operations will be automatically + cancelled so that the latest Rollout will not be blocked + by previous Rollouts. + + Only the 100 most recent (in any state) and the last 10 + successful (if not already part of the set of 100 most + recent) rollouts are kept for each service. The rest + will be deleted eventually. + + Operation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_create_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRolloutRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.create_service_rollout(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.CreateServiceRolloutRequest, dict]): + The request object. Request message for + 'CreateServiceRollout' + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. 
+ + This corresponds to the ``service_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rollout (google.cloud.servicemanagement_v1.types.Rollout): + Required. The rollout resource. The ``service_name`` + field is output only. + + This corresponds to the ``rollout`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.servicemanagement_v1.types.Rollout` A rollout resource that defines how service configuration versions are pushed + to control plane systems. Typically, you create a new + version of the service config, and then create a + Rollout to push the service config. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service_name, rollout]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.CreateServiceRolloutRequest): + request = servicemanager.CreateServiceRolloutRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if service_name is not None: + request.service_name = service_name + if rollout is not None: + request.rollout = rollout + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_service_rollout] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("service_name", request.service_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Rollout, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_config_report(self, + request: Optional[Union[servicemanager.GenerateConfigReportRequest, dict]] = None, + *, + new_config: Optional[any_pb2.Any] = None, + old_config: Optional[any_pb2.Any] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> servicemanager.GenerateConfigReportResponse: + r"""Generates and returns a report (errors, warnings and changes + from existing configurations) associated with + GenerateConfigReportRequest.new_value + + If GenerateConfigReportRequest.old_value is specified, + GenerateConfigReportRequest will contain a single ChangeReport + based on the comparison between + GenerateConfigReportRequest.new_value and + GenerateConfigReportRequest.old_value. If + GenerateConfigReportRequest.old_value is not specified, this + method will compare GenerateConfigReportRequest.new_value with + the last pushed service configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicemanagement_v1 + + def sample_generate_config_report(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GenerateConfigReportRequest( + ) + + # Make the request + response = client.generate_config_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicemanagement_v1.types.GenerateConfigReportRequest, dict]): + The request object. Request message for + GenerateConfigReport method. + new_config (google.protobuf.any_pb2.Any): + Required. Service configuration for which we want to + generate the report. For this version of API, the + supported types are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + + This corresponds to the ``new_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + old_config (google.protobuf.any_pb2.Any): + Optional. Service configuration against which the + comparison will be done. 
For this version of API, the + supported types are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + + This corresponds to the ``old_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicemanagement_v1.types.GenerateConfigReportResponse: + Response message for + GenerateConfigReport method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([new_config, old_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, servicemanager.GenerateConfigReportRequest): + request = servicemanager.GenerateConfigReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if new_config is not None: + request.new_config = new_config + if old_config is not None: + request.old_config = old_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_config_report] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ServiceManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceManagerClient", +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/pagers.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/pagers.py new file mode 100644 index 000000000000..22cc89b5dbbf --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/pagers.py @@ -0,0 +1,434 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager + + +class ListServicesPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServicesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., servicemanager.ListServicesResponse], + request: servicemanager.ListServicesRequest, + response: servicemanager.ListServicesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServicesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = servicemanager.ListServicesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[servicemanager.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.ManagedService]: + for page in self.pages: + yield from page.services + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServicesAsyncPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServicesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[servicemanager.ListServicesResponse]], + request: servicemanager.ListServicesRequest, + response: servicemanager.ListServicesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServicesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = servicemanager.ListServicesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[servicemanager.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.ManagedService]: + async def async_generator(): + async for page in self.pages: + for response in page.services: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceConfigsPager: + """A pager for iterating through ``list_service_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``service_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServiceConfigs`` requests and continue to iterate + through the ``service_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., servicemanager.ListServiceConfigsResponse], + request: servicemanager.ListServiceConfigsRequest, + response: servicemanager.ListServiceConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = servicemanager.ListServiceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[servicemanager.ListServiceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[service_pb2.Service]: + for page in self.pages: + yield from page.service_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceConfigsAsyncPager: + """A pager for iterating through ``list_service_configs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``service_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServiceConfigs`` requests and continue to iterate + through the ``service_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[servicemanager.ListServiceConfigsResponse]], + request: servicemanager.ListServiceConfigsRequest, + response: servicemanager.ListServiceConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServiceConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = servicemanager.ListServiceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[servicemanager.ListServiceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[service_pb2.Service]: + async def async_generator(): + async for page in self.pages: + for response in page.service_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceRolloutsPager: + """A pager for iterating through ``list_service_rollouts`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rollouts`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServiceRollouts`` requests and continue to iterate + through the ``rollouts`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., servicemanager.ListServiceRolloutsResponse], + request: servicemanager.ListServiceRolloutsRequest, + response: servicemanager.ListServiceRolloutsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = servicemanager.ListServiceRolloutsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[servicemanager.ListServiceRolloutsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Rollout]: + for page in self.pages: + yield from page.rollouts + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServiceRolloutsAsyncPager: + """A pager for iterating through ``list_service_rollouts`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``rollouts`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServiceRollouts`` requests and continue to iterate + through the ``rollouts`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[servicemanager.ListServiceRolloutsResponse]], + request: servicemanager.ListServiceRolloutsRequest, + response: servicemanager.ListServiceRolloutsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest): + The initial request object. + response (google.cloud.servicemanagement_v1.types.ListServiceRolloutsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = servicemanager.ListServiceRolloutsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[servicemanager.ListServiceRolloutsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Rollout]: + async def async_generator(): + async for page in self.pages: + for response in page.rollouts: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/README.rst b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/README.rst new file mode 100644 index 000000000000..39f819d78267 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ServiceManagerTransport` is the ABC for all transports. +- public child `ServiceManagerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ServiceManagerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseServiceManagerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `ServiceManagerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/__init__.py new file mode 100644 index 000000000000..c2df30ff9129 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceManagerTransport +from .grpc import ServiceManagerGrpcTransport +from .grpc_asyncio import ServiceManagerGrpcAsyncIOTransport +from .rest import ServiceManagerRestTransport +from .rest import ServiceManagerRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceManagerTransport]] +_transport_registry['grpc'] = ServiceManagerGrpcTransport +_transport_registry['grpc_asyncio'] = ServiceManagerGrpcAsyncIOTransport +_transport_registry['rest'] = ServiceManagerRestTransport + +__all__ = ( + 'ServiceManagerTransport', + 'ServiceManagerGrpcTransport', + 'ServiceManagerGrpcAsyncIOTransport', + 'ServiceManagerRestTransport', + 'ServiceManagerRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/base.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/base.py new file mode 100644 index 000000000000..864031ae421a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/base.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.servicemanagement_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ServiceManagerTransport(abc.ABC): + """Abstract transport class for ServiceManager.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + 'https://www.googleapis.com/auth/service.management.readonly', + ) + + DEFAULT_HOST: str = 'servicemanagement.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicemanagement.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_services: gapic_v1.method.wrap_method( + self.list_services, + default_timeout=None, + client_info=client_info, + ), + self.get_service: gapic_v1.method.wrap_method( + self.get_service, + default_timeout=None, + client_info=client_info, + ), + self.create_service: gapic_v1.method.wrap_method( + self.create_service, + default_timeout=None, + client_info=client_info, + ), + self.delete_service: gapic_v1.method.wrap_method( + self.delete_service, + default_timeout=None, + client_info=client_info, + ), + self.undelete_service: gapic_v1.method.wrap_method( + self.undelete_service, + default_timeout=None, + client_info=client_info, + ), + self.list_service_configs: gapic_v1.method.wrap_method( + self.list_service_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_service_config: gapic_v1.method.wrap_method( + self.get_service_config, + default_timeout=None, + client_info=client_info, + ), + self.create_service_config: gapic_v1.method.wrap_method( + self.create_service_config, + default_timeout=None, + client_info=client_info, + ), + self.submit_config_source: gapic_v1.method.wrap_method( + self.submit_config_source, + 
default_timeout=None, + client_info=client_info, + ), + self.list_service_rollouts: gapic_v1.method.wrap_method( + self.list_service_rollouts, + default_timeout=None, + client_info=client_info, + ), + self.get_service_rollout: gapic_v1.method.wrap_method( + self.get_service_rollout, + default_timeout=None, + client_info=client_info, + ), + self.create_service_rollout: gapic_v1.method.wrap_method( + self.create_service_rollout, + default_timeout=None, + client_info=client_info, + ), + self.generate_config_report: gapic_v1.method.wrap_method( + self.generate_config_report, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_services(self) -> Callable[ + [servicemanager.ListServicesRequest], + Union[ + servicemanager.ListServicesResponse, + Awaitable[servicemanager.ListServicesResponse] + ]]: + raise NotImplementedError() + + @property + def get_service(self) -> Callable[ + [servicemanager.GetServiceRequest], + Union[ + resources.ManagedService, + Awaitable[resources.ManagedService] + ]]: + raise NotImplementedError() + + @property + def create_service(self) -> Callable[ + [servicemanager.CreateServiceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_service(self) -> Callable[ + [servicemanager.DeleteServiceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def undelete_service(self) -> Callable[ + [servicemanager.UndeleteServiceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_service_configs(self) -> Callable[ + [servicemanager.ListServiceConfigsRequest], + Union[ + servicemanager.ListServiceConfigsResponse, + Awaitable[servicemanager.ListServiceConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_service_config(self) -> Callable[ + [servicemanager.GetServiceConfigRequest], + Union[ + service_pb2.Service, + Awaitable[service_pb2.Service] + ]]: + raise NotImplementedError() + + @property + def create_service_config(self) -> Callable[ + [servicemanager.CreateServiceConfigRequest], + Union[ + service_pb2.Service, + Awaitable[service_pb2.Service] + ]]: + raise NotImplementedError() + + @property + def submit_config_source(self) -> Callable[ + [servicemanager.SubmitConfigSourceRequest], 
+ Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_service_rollouts(self) -> Callable[ + [servicemanager.ListServiceRolloutsRequest], + Union[ + servicemanager.ListServiceRolloutsResponse, + Awaitable[servicemanager.ListServiceRolloutsResponse] + ]]: + raise NotImplementedError() + + @property + def get_service_rollout(self) -> Callable[ + [servicemanager.GetServiceRolloutRequest], + Union[ + resources.Rollout, + Awaitable[resources.Rollout] + ]]: + raise NotImplementedError() + + @property + def create_service_rollout(self) -> Callable[ + [servicemanager.CreateServiceRolloutRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def generate_config_report(self) -> Callable[ + [servicemanager.GenerateConfigReportRequest], + Union[ + servicemanager.GenerateConfigReportResponse, + Awaitable[servicemanager.GenerateConfigReportResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ( + 'ServiceManagerTransport', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc.py new file mode 100644 index 000000000000..bf47d10aec1d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc.py @@ -0,0 +1,784 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import ServiceManagerTransport, DEFAULT_CLIENT_INFO + + +class ServiceManagerGrpcTransport(ServiceManagerTransport): + """gRPC backend transport for ServiceManager. + + `Google Service Management + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicemanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be 
used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_services(self) -> Callable[ + [servicemanager.ListServicesRequest], + servicemanager.ListServicesResponse]: + r"""Return a callable for the list services method over gRPC. + + Lists managed services. + + Returns all public services. For authenticated users, + also returns all services the calling user has + "servicemanagement.services.get" permission for. + + Returns: + Callable[[~.ListServicesRequest], + ~.ListServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServices', + request_serializer=servicemanager.ListServicesRequest.serialize, + response_deserializer=servicemanager.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def get_service(self) -> Callable[ + [servicemanager.GetServiceRequest], + resources.ManagedService]: + r"""Return a callable for the get service method over gRPC. + + Gets a managed service. Authentication is required + unless the service is public. + + Returns: + Callable[[~.GetServiceRequest], + ~.ManagedService]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetService', + request_serializer=servicemanager.GetServiceRequest.serialize, + response_deserializer=resources.ManagedService.deserialize, + ) + return self._stubs['get_service'] + + @property + def create_service(self) -> Callable[ + [servicemanager.CreateServiceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create service method over gRPC. + + Creates a new managed service. + + A managed service is immutable, and is subject to + mandatory 30-day data retention. You cannot move a + service or recreate it within 30 days after deletion. + + One producer project can own no more than 500 services. + For security and reliability purposes, a production + service should be hosted in a dedicated producer + project. 
+ + Operation + + Returns: + Callable[[~.CreateServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service' not in self._stubs: + self._stubs['create_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateService', + request_serializer=servicemanager.CreateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_service'] + + @property + def delete_service(self) -> Callable[ + [servicemanager.DeleteServiceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete service method over gRPC. + + Deletes a managed service. This method will change the service + to the ``Soft-Delete`` state for 30 days. Within this period, + service producers may call + [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + to restore the service. After 30 days, the service will be + permanently deleted. + + Operation + + Returns: + Callable[[~.DeleteServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_service' not in self._stubs: + self._stubs['delete_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/DeleteService', + request_serializer=servicemanager.DeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_service'] + + @property + def undelete_service(self) -> Callable[ + [servicemanager.UndeleteServiceRequest], + operations_pb2.Operation]: + r"""Return a callable for the undelete service method over gRPC. + + Revives a previously deleted managed service. The + method restores the service using the configuration at + the time the service was deleted. The target service + must exist and must have been deleted within the last 30 + days. + + Operation + + Returns: + Callable[[~.UndeleteServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_service' not in self._stubs: + self._stubs['undelete_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/UndeleteService', + request_serializer=servicemanager.UndeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['undelete_service'] + + @property + def list_service_configs(self) -> Callable[ + [servicemanager.ListServiceConfigsRequest], + servicemanager.ListServiceConfigsResponse]: + r"""Return a callable for the list service configs method over gRPC. + + Lists the history of the service configuration for a + managed service, from the newest to the oldest. + + Returns: + Callable[[~.ListServiceConfigsRequest], + ~.ListServiceConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_service_configs' not in self._stubs: + self._stubs['list_service_configs'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServiceConfigs', + request_serializer=servicemanager.ListServiceConfigsRequest.serialize, + response_deserializer=servicemanager.ListServiceConfigsResponse.deserialize, + ) + return self._stubs['list_service_configs'] + + @property + def get_service_config(self) -> Callable[ + [servicemanager.GetServiceConfigRequest], + service_pb2.Service]: + r"""Return a callable for the get service config method over gRPC. + + Gets a service configuration (version) for a managed + service. + + Returns: + Callable[[~.GetServiceConfigRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service_config' not in self._stubs: + self._stubs['get_service_config'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetServiceConfig', + request_serializer=servicemanager.GetServiceConfigRequest.serialize, + response_deserializer=service_pb2.Service.FromString, + ) + return self._stubs['get_service_config'] + + @property + def create_service_config(self) -> Callable[ + [servicemanager.CreateServiceConfigRequest], + service_pb2.Service]: + r"""Return a callable for the create service config method over gRPC. + + Creates a new service configuration (version) for a managed + service. This method only stores the service configuration. 
To + roll out the service configuration to backend systems please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent service configurations and ones + referenced by existing rollouts are kept for each service. The + rest will be deleted eventually. + + Returns: + Callable[[~.CreateServiceConfigRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service_config' not in self._stubs: + self._stubs['create_service_config'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateServiceConfig', + request_serializer=servicemanager.CreateServiceConfigRequest.serialize, + response_deserializer=service_pb2.Service.FromString, + ) + return self._stubs['create_service_config'] + + @property + def submit_config_source(self) -> Callable[ + [servicemanager.SubmitConfigSourceRequest], + operations_pb2.Operation]: + r"""Return a callable for the submit config source method over gRPC. + + Creates a new service configuration (version) for a managed + service based on user-supplied configuration source files (for + example: OpenAPI Specification). This method stores the source + configurations as well as the generated service configuration. + To rollout the service configuration to other services, please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent configuration sources and ones + referenced by existing service configurtions are kept for each + service. The rest will be deleted eventually. 
+ + Operation + + Returns: + Callable[[~.SubmitConfigSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'submit_config_source' not in self._stubs: + self._stubs['submit_config_source'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/SubmitConfigSource', + request_serializer=servicemanager.SubmitConfigSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['submit_config_source'] + + @property + def list_service_rollouts(self) -> Callable[ + [servicemanager.ListServiceRolloutsRequest], + servicemanager.ListServiceRolloutsResponse]: + r"""Return a callable for the list service rollouts method over gRPC. + + Lists the history of the service configuration + rollouts for a managed service, from the newest to the + oldest. + + Returns: + Callable[[~.ListServiceRolloutsRequest], + ~.ListServiceRolloutsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_service_rollouts' not in self._stubs: + self._stubs['list_service_rollouts'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServiceRollouts', + request_serializer=servicemanager.ListServiceRolloutsRequest.serialize, + response_deserializer=servicemanager.ListServiceRolloutsResponse.deserialize, + ) + return self._stubs['list_service_rollouts'] + + @property + def get_service_rollout(self) -> Callable[ + [servicemanager.GetServiceRolloutRequest], + resources.Rollout]: + r"""Return a callable for the get service rollout method over gRPC. + + Gets a service configuration + [rollout][google.api.servicemanagement.v1.Rollout]. + + Returns: + Callable[[~.GetServiceRolloutRequest], + ~.Rollout]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service_rollout' not in self._stubs: + self._stubs['get_service_rollout'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetServiceRollout', + request_serializer=servicemanager.GetServiceRolloutRequest.serialize, + response_deserializer=resources.Rollout.deserialize, + ) + return self._stubs['get_service_rollout'] + + @property + def create_service_rollout(self) -> Callable[ + [servicemanager.CreateServiceRolloutRequest], + operations_pb2.Operation]: + r"""Return a callable for the create service rollout method over gRPC. + + Creates a new service configuration rollout. Based on + rollout, the Google Service Management will roll out the + service configurations to different backend services. + For example, the logging configuration will be pushed to + Google Cloud Logging. 
+ + Please note that any previous pending and running + Rollouts and associated Operations will be automatically + cancelled so that the latest Rollout will not be blocked + by previous Rollouts. + + Only the 100 most recent (in any state) and the last 10 + successful (if not already part of the set of 100 most + recent) rollouts are kept for each service. The rest + will be deleted eventually. + + Operation + + Returns: + Callable[[~.CreateServiceRolloutRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service_rollout' not in self._stubs: + self._stubs['create_service_rollout'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateServiceRollout', + request_serializer=servicemanager.CreateServiceRolloutRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_service_rollout'] + + @property + def generate_config_report(self) -> Callable[ + [servicemanager.GenerateConfigReportRequest], + servicemanager.GenerateConfigReportResponse]: + r"""Return a callable for the generate config report method over gRPC. + + Generates and returns a report (errors, warnings and changes + from existing configurations) associated with + GenerateConfigReportRequest.new_value + + If GenerateConfigReportRequest.old_value is specified, + GenerateConfigReportRequest will contain a single ChangeReport + based on the comparison between + GenerateConfigReportRequest.new_value and + GenerateConfigReportRequest.old_value. If + GenerateConfigReportRequest.old_value is not specified, this + method will compare GenerateConfigReportRequest.new_value with + the last pushed service configuration. 
+ + Returns: + Callable[[~.GenerateConfigReportRequest], + ~.GenerateConfigReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_config_report' not in self._stubs: + self._stubs['generate_config_report'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GenerateConfigReport', + request_serializer=servicemanager.GenerateConfigReportRequest.serialize, + response_deserializer=servicemanager.GenerateConfigReportResponse.deserialize, + ) + return self._stubs['generate_config_report'] + + def close(self): + self.grpc_channel.close() + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ServiceManagerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc_asyncio.py new file mode 100644 index 000000000000..330d1d31397a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/grpc_asyncio.py @@ -0,0 +1,885 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import ServiceManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import ServiceManagerGrpcTransport + + +class ServiceManagerGrpcAsyncIOTransport(ServiceManagerTransport): + """gRPC AsyncIO backend transport for ServiceManager. + + `Google Service Management + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicemanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_services(self) -> Callable[ + [servicemanager.ListServicesRequest], + Awaitable[servicemanager.ListServicesResponse]]: + r"""Return a callable for the list services method over gRPC. + + Lists managed services. + + Returns all public services. For authenticated users, + also returns all services the calling user has + "servicemanagement.services.get" permission for. + + Returns: + Callable[[~.ListServicesRequest], + Awaitable[~.ListServicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServices', + request_serializer=servicemanager.ListServicesRequest.serialize, + response_deserializer=servicemanager.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def get_service(self) -> Callable[ + [servicemanager.GetServiceRequest], + Awaitable[resources.ManagedService]]: + r"""Return a callable for the get service method over gRPC. + + Gets a managed service. Authentication is required + unless the service is public. 
+ + Returns: + Callable[[~.GetServiceRequest], + Awaitable[~.ManagedService]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetService', + request_serializer=servicemanager.GetServiceRequest.serialize, + response_deserializer=resources.ManagedService.deserialize, + ) + return self._stubs['get_service'] + + @property + def create_service(self) -> Callable[ + [servicemanager.CreateServiceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create service method over gRPC. + + Creates a new managed service. + + A managed service is immutable, and is subject to + mandatory 30-day data retention. You cannot move a + service or recreate it within 30 days after deletion. + + One producer project can own no more than 500 services. + For security and reliability purposes, a production + service should be hosted in a dedicated producer + project. + + Operation + + Returns: + Callable[[~.CreateServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_service' not in self._stubs: + self._stubs['create_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateService', + request_serializer=servicemanager.CreateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_service'] + + @property + def delete_service(self) -> Callable[ + [servicemanager.DeleteServiceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete service method over gRPC. + + Deletes a managed service. This method will change the service + to the ``Soft-Delete`` state for 30 days. Within this period, + service producers may call + [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + to restore the service. After 30 days, the service will be + permanently deleted. + + Operation + + Returns: + Callable[[~.DeleteServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_service' not in self._stubs: + self._stubs['delete_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/DeleteService', + request_serializer=servicemanager.DeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_service'] + + @property + def undelete_service(self) -> Callable[ + [servicemanager.UndeleteServiceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the undelete service method over gRPC. + + Revives a previously deleted managed service. The + method restores the service using the configuration at + the time the service was deleted. 
The target service + must exist and must have been deleted within the last 30 + days. + + Operation + + Returns: + Callable[[~.UndeleteServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_service' not in self._stubs: + self._stubs['undelete_service'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/UndeleteService', + request_serializer=servicemanager.UndeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['undelete_service'] + + @property + def list_service_configs(self) -> Callable[ + [servicemanager.ListServiceConfigsRequest], + Awaitable[servicemanager.ListServiceConfigsResponse]]: + r"""Return a callable for the list service configs method over gRPC. + + Lists the history of the service configuration for a + managed service, from the newest to the oldest. + + Returns: + Callable[[~.ListServiceConfigsRequest], + Awaitable[~.ListServiceConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_service_configs' not in self._stubs: + self._stubs['list_service_configs'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServiceConfigs', + request_serializer=servicemanager.ListServiceConfigsRequest.serialize, + response_deserializer=servicemanager.ListServiceConfigsResponse.deserialize, + ) + return self._stubs['list_service_configs'] + + @property + def get_service_config(self) -> Callable[ + [servicemanager.GetServiceConfigRequest], + Awaitable[service_pb2.Service]]: + r"""Return a callable for the get service config method over gRPC. + + Gets a service configuration (version) for a managed + service. + + Returns: + Callable[[~.GetServiceConfigRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service_config' not in self._stubs: + self._stubs['get_service_config'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetServiceConfig', + request_serializer=servicemanager.GetServiceConfigRequest.serialize, + response_deserializer=service_pb2.Service.FromString, + ) + return self._stubs['get_service_config'] + + @property + def create_service_config(self) -> Callable[ + [servicemanager.CreateServiceConfigRequest], + Awaitable[service_pb2.Service]]: + r"""Return a callable for the create service config method over gRPC. + + Creates a new service configuration (version) for a managed + service. This method only stores the service configuration. To + roll out the service configuration to backend systems please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. 
+ + Only the 100 most recent service configurations and ones + referenced by existing rollouts are kept for each service. The + rest will be deleted eventually. + + Returns: + Callable[[~.CreateServiceConfigRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_service_config' not in self._stubs: + self._stubs['create_service_config'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateServiceConfig', + request_serializer=servicemanager.CreateServiceConfigRequest.serialize, + response_deserializer=service_pb2.Service.FromString, + ) + return self._stubs['create_service_config'] + + @property + def submit_config_source(self) -> Callable[ + [servicemanager.SubmitConfigSourceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the submit config source method over gRPC. + + Creates a new service configuration (version) for a managed + service based on user-supplied configuration source files (for + example: OpenAPI Specification). This method stores the source + configurations as well as the generated service configuration. + To rollout the service configuration to other services, please + call + [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + + Only the 100 most recent configuration sources and ones + referenced by existing service configurtions are kept for each + service. The rest will be deleted eventually. + + Operation + + Returns: + Callable[[~.SubmitConfigSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'submit_config_source' not in self._stubs: + self._stubs['submit_config_source'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/SubmitConfigSource', + request_serializer=servicemanager.SubmitConfigSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['submit_config_source'] + + @property + def list_service_rollouts(self) -> Callable[ + [servicemanager.ListServiceRolloutsRequest], + Awaitable[servicemanager.ListServiceRolloutsResponse]]: + r"""Return a callable for the list service rollouts method over gRPC. + + Lists the history of the service configuration + rollouts for a managed service, from the newest to the + oldest. + + Returns: + Callable[[~.ListServiceRolloutsRequest], + Awaitable[~.ListServiceRolloutsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_service_rollouts' not in self._stubs: + self._stubs['list_service_rollouts'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/ListServiceRollouts', + request_serializer=servicemanager.ListServiceRolloutsRequest.serialize, + response_deserializer=servicemanager.ListServiceRolloutsResponse.deserialize, + ) + return self._stubs['list_service_rollouts'] + + @property + def get_service_rollout(self) -> Callable[ + [servicemanager.GetServiceRolloutRequest], + Awaitable[resources.Rollout]]: + r"""Return a callable for the get service rollout method over gRPC. + + Gets a service configuration + [rollout][google.api.servicemanagement.v1.Rollout]. 
+ + Returns: + Callable[[~.GetServiceRolloutRequest], + Awaitable[~.Rollout]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service_rollout' not in self._stubs: + self._stubs['get_service_rollout'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GetServiceRollout', + request_serializer=servicemanager.GetServiceRolloutRequest.serialize, + response_deserializer=resources.Rollout.deserialize, + ) + return self._stubs['get_service_rollout'] + + @property + def create_service_rollout(self) -> Callable[ + [servicemanager.CreateServiceRolloutRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create service rollout method over gRPC. + + Creates a new service configuration rollout. Based on + rollout, the Google Service Management will roll out the + service configurations to different backend services. + For example, the logging configuration will be pushed to + Google Cloud Logging. + + Please note that any previous pending and running + Rollouts and associated Operations will be automatically + cancelled so that the latest Rollout will not be blocked + by previous Rollouts. + + Only the 100 most recent (in any state) and the last 10 + successful (if not already part of the set of 100 most + recent) rollouts are kept for each service. The rest + will be deleted eventually. + + Operation + + Returns: + Callable[[~.CreateServiceRolloutRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_service_rollout' not in self._stubs: + self._stubs['create_service_rollout'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/CreateServiceRollout', + request_serializer=servicemanager.CreateServiceRolloutRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_service_rollout'] + + @property + def generate_config_report(self) -> Callable[ + [servicemanager.GenerateConfigReportRequest], + Awaitable[servicemanager.GenerateConfigReportResponse]]: + r"""Return a callable for the generate config report method over gRPC. + + Generates and returns a report (errors, warnings and changes + from existing configurations) associated with + GenerateConfigReportRequest.new_value + + If GenerateConfigReportRequest.old_value is specified, + GenerateConfigReportRequest will contain a single ChangeReport + based on the comparison between + GenerateConfigReportRequest.new_value and + GenerateConfigReportRequest.old_value. If + GenerateConfigReportRequest.old_value is not specified, this + method will compare GenerateConfigReportRequest.new_value with + the last pushed service configuration. + + Returns: + Callable[[~.GenerateConfigReportRequest], + Awaitable[~.GenerateConfigReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'generate_config_report' not in self._stubs: + self._stubs['generate_config_report'] = self.grpc_channel.unary_unary( + '/google.api.servicemanagement.v1.ServiceManager/GenerateConfigReport', + request_serializer=servicemanager.GenerateConfigReportRequest.serialize, + response_deserializer=servicemanager.GenerateConfigReportResponse.deserialize, + ) + return self._stubs['generate_config_report'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_services: self._wrap_method( + self.list_services, + default_timeout=None, + client_info=client_info, + ), + self.get_service: self._wrap_method( + self.get_service, + default_timeout=None, + client_info=client_info, + ), + self.create_service: self._wrap_method( + self.create_service, + default_timeout=None, + client_info=client_info, + ), + self.delete_service: self._wrap_method( + self.delete_service, + default_timeout=None, + client_info=client_info, + ), + self.undelete_service: self._wrap_method( + self.undelete_service, + default_timeout=None, + client_info=client_info, + ), + self.list_service_configs: self._wrap_method( + self.list_service_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_service_config: self._wrap_method( + self.get_service_config, + default_timeout=None, + client_info=client_info, + ), + self.create_service_config: self._wrap_method( + self.create_service_config, + default_timeout=None, + client_info=client_info, + ), + self.submit_config_source: self._wrap_method( + self.submit_config_source, + default_timeout=None, + client_info=client_info, + ), + self.list_service_rollouts: self._wrap_method( + self.list_service_rollouts, + default_timeout=None, + client_info=client_info, + ), + self.get_service_rollout: self._wrap_method( + self.get_service_rollout, + default_timeout=None, + client_info=client_info, + ), + 
self.create_service_rollout: self._wrap_method( + self.create_service_rollout, + default_timeout=None, + client_info=client_info, + ), + self.generate_config_report: self._wrap_method( + self.generate_config_report, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ( + 'ServiceManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest.py new file mode 100644 index 000000000000..81c1091d2ad1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest.py @@ -0,0 +1,2064 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseServiceManagerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class ServiceManagerRestInterceptor: + """Interceptor for ServiceManager. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceManagerRestTransport. + + .. code-block:: python + class MyCustomServiceManagerInterceptor(ServiceManagerRestInterceptor): + def pre_create_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_service_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_service_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_service_rollout(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_service_rollout(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_config_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_config_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_service_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_service_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_service_rollout(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_service_rollout(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_service_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_service_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_service_rollouts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_service_rollouts(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_services(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_services(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_submit_config_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_config_source(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_undelete_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_undelete_service(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceManagerRestTransport(interceptor=MyCustomServiceManagerInterceptor()) + client = ServiceManagerClient(transport=transport) + + + """ + def pre_create_service(self, request: servicemanager.CreateServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.CreateServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_service + + Override in a subclass to 
manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_create_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_create_service_config(self, request: servicemanager.CreateServiceConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.CreateServiceConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_service_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_create_service_config(self, response: service_pb2.Service) -> service_pb2.Service: + """Post-rpc interceptor for create_service_config + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_create_service_rollout(self, request: servicemanager.CreateServiceRolloutRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.CreateServiceRolloutRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_service_rollout + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_create_service_rollout(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_service_rollout + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_service(self, request: servicemanager.DeleteServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.DeleteServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_delete_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_generate_config_report(self, request: servicemanager.GenerateConfigReportRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.GenerateConfigReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_config_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_generate_config_report(self, response: servicemanager.GenerateConfigReportResponse) -> servicemanager.GenerateConfigReportResponse: + """Post-rpc interceptor for generate_config_report + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_get_service(self, request: servicemanager.GetServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.GetServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. 
+ """ + return request, metadata + + def post_get_service(self, response: resources.ManagedService) -> resources.ManagedService: + """Post-rpc interceptor for get_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_get_service_config(self, request: servicemanager.GetServiceConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.GetServiceConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_service_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_get_service_config(self, response: service_pb2.Service) -> service_pb2.Service: + """Post-rpc interceptor for get_service_config + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_get_service_rollout(self, request: servicemanager.GetServiceRolloutRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.GetServiceRolloutRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_service_rollout + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_get_service_rollout(self, response: resources.Rollout) -> resources.Rollout: + """Post-rpc interceptor for get_service_rollout + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. 
+ """ + return response + + def pre_list_service_configs(self, request: servicemanager.ListServiceConfigsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.ListServiceConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_service_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_list_service_configs(self, response: servicemanager.ListServiceConfigsResponse) -> servicemanager.ListServiceConfigsResponse: + """Post-rpc interceptor for list_service_configs + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_list_service_rollouts(self, request: servicemanager.ListServiceRolloutsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.ListServiceRolloutsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_service_rollouts + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_list_service_rollouts(self, response: servicemanager.ListServiceRolloutsResponse) -> servicemanager.ListServiceRolloutsResponse: + """Post-rpc interceptor for list_service_rollouts + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_list_services(self, request: servicemanager.ListServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.ListServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_services + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. 
+ """ + return request, metadata + + def post_list_services(self, response: servicemanager.ListServicesResponse) -> servicemanager.ListServicesResponse: + """Post-rpc interceptor for list_services + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_submit_config_source(self, request: servicemanager.SubmitConfigSourceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.SubmitConfigSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_config_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_submit_config_source(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for submit_config_source + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_undelete_service(self, request: servicemanager.UndeleteServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[servicemanager.UndeleteServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for undelete_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_undelete_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for undelete_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. 
+ """ + return response + + def pre_get_iam_policy( + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_set_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceManager server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ServiceManager server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServiceManagerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceManagerRestInterceptor + + +class ServiceManagerRestTransport(_BaseServiceManagerRestTransport): + """REST backend synchronous transport for ServiceManager. + + `Google Service Management + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ServiceManagerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicemanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServiceManagerRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateService(_BaseServiceManagerRestTransport._BaseCreateService, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.CreateService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: servicemanager.CreateServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create service method over HTTP. + + Args: + request (~.servicemanager.CreateServiceRequest): + The request object. Request message for CreateService + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseServiceManagerRestTransport._BaseCreateService._get_http_options() + request, metadata = self._interceptor.pre_create_service(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseCreateService._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseCreateService._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseCreateService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._CreateService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_service(resp) + return resp + + class _CreateServiceConfig(_BaseServiceManagerRestTransport._BaseCreateServiceConfig, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.CreateServiceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: servicemanager.CreateServiceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, str]]=(), + ) -> service_pb2.Service: + r"""Call the create service config method over HTTP. + + Args: + request (~.servicemanager.CreateServiceConfigRequest): + The request object. Request message for + CreateServiceConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service_pb2.Service: + ``Service`` is the root object of Google API service + configuration (service config). It describes the basic + information about a logical service, such as the service + name and the user-facing title, and delegates other + aspects to sub-sections. Each sub-section is either a + proto message or a repeated proto message that + configures a specific aspect, such as auth. For more + information, see each proto message definition. + + Example: + + :: + + type: google.api.Service + name: calendar.googleapis.com + title: Google Calendar API + apis: + - name: google.calendar.v3.Calendar + + visibility: + rules: + - selector: "google.calendar.v3.*" + restriction: PREVIEW + backend: + rules: + - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: + - id: google_calendar_auth + jwks_uri: https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com + rules: + - selector: "*" + requirements: + provider_id: google_calendar_auth + + """ + + http_options = _BaseServiceManagerRestTransport._BaseCreateServiceConfig._get_http_options() + request, metadata = self._interceptor.pre_create_service_config(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseCreateServiceConfig._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseCreateServiceConfig._get_request_body_json(transcoded_request) + + # Jsonify 
the query params + query_params = _BaseServiceManagerRestTransport._BaseCreateServiceConfig._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._CreateServiceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_pb2.Service() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_service_config(resp) + return resp + + class _CreateServiceRollout(_BaseServiceManagerRestTransport._BaseCreateServiceRollout, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.CreateServiceRollout") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: servicemanager.CreateServiceRolloutRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create service rollout method over HTTP. + + Args: + request (~.servicemanager.CreateServiceRolloutRequest): + The request object. Request message for + 'CreateServiceRollout' + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseServiceManagerRestTransport._BaseCreateServiceRollout._get_http_options() + request, metadata = self._interceptor.pre_create_service_rollout(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseCreateServiceRollout._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseCreateServiceRollout._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseCreateServiceRollout._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._CreateServiceRollout._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_service_rollout(resp) + return resp + + class _DeleteService(_BaseServiceManagerRestTransport._BaseDeleteService, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.DeleteService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.DeleteServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete service method over HTTP. + + Args: + request (~.servicemanager.DeleteServiceRequest): + The request object. Request message for DeleteService + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseServiceManagerRestTransport._BaseDeleteService._get_http_options() + request, metadata = self._interceptor.pre_delete_service(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseDeleteService._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseDeleteService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._DeleteService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_service(resp) + return resp + + class _GenerateConfigReport(_BaseServiceManagerRestTransport._BaseGenerateConfigReport, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.GenerateConfigReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: servicemanager.GenerateConfigReportRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> servicemanager.GenerateConfigReportResponse: + r"""Call the 
generate config report method over HTTP. + + Args: + request (~.servicemanager.GenerateConfigReportRequest): + The request object. Request message for + GenerateConfigReport method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.servicemanager.GenerateConfigReportResponse: + Response message for + GenerateConfigReport method. + + """ + + http_options = _BaseServiceManagerRestTransport._BaseGenerateConfigReport._get_http_options() + request, metadata = self._interceptor.pre_generate_config_report(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseGenerateConfigReport._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseGenerateConfigReport._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseGenerateConfigReport._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._GenerateConfigReport._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = servicemanager.GenerateConfigReportResponse() + pb_resp = servicemanager.GenerateConfigReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_config_report(resp) + return resp + + class _GetService(_BaseServiceManagerRestTransport._BaseGetService, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.GetService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.GetServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.ManagedService: + r"""Call the get service method over HTTP. + + Args: + request (~.servicemanager.GetServiceRequest): + The request object. Request message for ``GetService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ManagedService: + The full representation of a Service + that is managed by Google Service + Management. 
+ + """ + + http_options = _BaseServiceManagerRestTransport._BaseGetService._get_http_options() + request, metadata = self._interceptor.pre_get_service(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseGetService._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseGetService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._GetService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.ManagedService() + pb_resp = resources.ManagedService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_service(resp) + return resp + + class _GetServiceConfig(_BaseServiceManagerRestTransport._BaseGetServiceConfig, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.GetServiceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.GetServiceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> service_pb2.Service: + r"""Call the get service config method 
over HTTP. + + Args: + request (~.servicemanager.GetServiceConfigRequest): + The request object. Request message for GetServiceConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service_pb2.Service: + ``Service`` is the root object of Google API service + configuration (service config). It describes the basic + information about a logical service, such as the service + name and the user-facing title, and delegates other + aspects to sub-sections. Each sub-section is either a + proto message or a repeated proto message that + configures a specific aspect, such as auth. For more + information, see each proto message definition. + + Example: + + :: + + type: google.api.Service + name: calendar.googleapis.com + title: Google Calendar API + apis: + - name: google.calendar.v3.Calendar + + visibility: + rules: + - selector: "google.calendar.v3.*" + restriction: PREVIEW + backend: + rules: + - selector: "google.calendar.v3.*" + address: calendar.example.com + + authentication: + providers: + - id: google_calendar_auth + jwks_uri: https://www.googleapis.com/oauth2/v1/certs + issuer: https://securetoken.google.com + rules: + - selector: "*" + requirements: + provider_id: google_calendar_auth + + """ + + http_options = _BaseServiceManagerRestTransport._BaseGetServiceConfig._get_http_options() + request, metadata = self._interceptor.pre_get_service_config(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseGetServiceConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseGetServiceConfig._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._GetServiceConfig._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service_pb2.Service() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_service_config(resp) + return resp + + class _GetServiceRollout(_BaseServiceManagerRestTransport._BaseGetServiceRollout, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.GetServiceRollout") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.GetServiceRolloutRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.Rollout: + r"""Call the get service rollout method over HTTP. + + Args: + request (~.servicemanager.GetServiceRolloutRequest): + The request object. Request message for GetServiceRollout + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Rollout: + A rollout resource that defines how + service configuration versions are + pushed to control plane systems. 
+ Typically, you create a new version of + the service config, and then create a + Rollout to push the service config. + + """ + + http_options = _BaseServiceManagerRestTransport._BaseGetServiceRollout._get_http_options() + request, metadata = self._interceptor.pre_get_service_rollout(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseGetServiceRollout._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseGetServiceRollout._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._GetServiceRollout._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Rollout() + pb_resp = resources.Rollout.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_service_rollout(resp) + return resp + + class _ListServiceConfigs(_BaseServiceManagerRestTransport._BaseListServiceConfigs, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.ListServiceConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.ListServiceConfigsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> servicemanager.ListServiceConfigsResponse: + r"""Call the list service configs method over HTTP. + + Args: + request (~.servicemanager.ListServiceConfigsRequest): + The request object. Request message for + ListServiceConfigs method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.servicemanager.ListServiceConfigsResponse: + Response message for + ListServiceConfigs method. + + """ + + http_options = _BaseServiceManagerRestTransport._BaseListServiceConfigs._get_http_options() + request, metadata = self._interceptor.pre_list_service_configs(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseListServiceConfigs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseListServiceConfigs._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._ListServiceConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = servicemanager.ListServiceConfigsResponse() + pb_resp = servicemanager.ListServiceConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_service_configs(resp) + return resp + + class _ListServiceRollouts(_BaseServiceManagerRestTransport._BaseListServiceRollouts, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.ListServiceRollouts") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.ListServiceRolloutsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> servicemanager.ListServiceRolloutsResponse: + r"""Call the list service rollouts method over HTTP. + + Args: + request (~.servicemanager.ListServiceRolloutsRequest): + The request object. Request message for + 'ListServiceRollouts' + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.servicemanager.ListServiceRolloutsResponse: + Response message for + ListServiceRollouts method. 
+ + """ + + http_options = _BaseServiceManagerRestTransport._BaseListServiceRollouts._get_http_options() + request, metadata = self._interceptor.pre_list_service_rollouts(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseListServiceRollouts._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseListServiceRollouts._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._ListServiceRollouts._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = servicemanager.ListServiceRolloutsResponse() + pb_resp = servicemanager.ListServiceRolloutsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_service_rollouts(resp) + return resp + + class _ListServices(_BaseServiceManagerRestTransport._BaseListServices, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.ListServices") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.ListServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
str]]=(), + ) -> servicemanager.ListServicesResponse: + r"""Call the list services method over HTTP. + + Args: + request (~.servicemanager.ListServicesRequest): + The request object. Request message for ``ListServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.servicemanager.ListServicesResponse: + Response message for ``ListServices`` method. + """ + + http_options = _BaseServiceManagerRestTransport._BaseListServices._get_http_options() + request, metadata = self._interceptor.pre_list_services(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseListServices._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseListServices._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._ListServices._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = servicemanager.ListServicesResponse() + pb_resp = servicemanager.ListServicesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_services(resp) + return resp + + class _SubmitConfigSource(_BaseServiceManagerRestTransport._BaseSubmitConfigSource, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.SubmitConfigSource") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: servicemanager.SubmitConfigSourceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the submit config source method over HTTP. + + Args: + request (~.servicemanager.SubmitConfigSourceRequest): + The request object. Request message for + SubmitConfigSource method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseServiceManagerRestTransport._BaseSubmitConfigSource._get_http_options() + request, metadata = self._interceptor.pre_submit_config_source(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseSubmitConfigSource._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseSubmitConfigSource._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseSubmitConfigSource._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._SubmitConfigSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_config_source(resp) + return resp + + class _UndeleteService(_BaseServiceManagerRestTransport._BaseUndeleteService, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.UndeleteService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: servicemanager.UndeleteServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the undelete service method over HTTP. + + Args: + request (~.servicemanager.UndeleteServiceRequest): + The request object. Request message for UndeleteService + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseServiceManagerRestTransport._BaseUndeleteService._get_http_options() + request, metadata = self._interceptor.pre_undelete_service(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseUndeleteService._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseUndeleteService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._UndeleteService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_undelete_service(resp) + return resp + + @property + def create_service(self) -> Callable[ + [servicemanager.CreateServiceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateService(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_service_config(self) -> Callable[ + [servicemanager.CreateServiceConfigRequest], + service_pb2.Service]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateServiceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_service_rollout(self) -> Callable[ + [servicemanager.CreateServiceRolloutRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateServiceRollout(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_service(self) -> Callable[ + [servicemanager.DeleteServiceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteService(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_config_report(self) -> Callable[ + [servicemanager.GenerateConfigReportRequest], + servicemanager.GenerateConfigReportResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateConfigReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_service(self) -> Callable[ + [servicemanager.GetServiceRequest], + resources.ManagedService]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetService(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_service_config(self) -> Callable[ + [servicemanager.GetServiceConfigRequest], + service_pb2.Service]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetServiceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_service_rollout(self) -> Callable[ + [servicemanager.GetServiceRolloutRequest], + resources.Rollout]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetServiceRollout(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_service_configs(self) -> Callable[ + [servicemanager.ListServiceConfigsRequest], + servicemanager.ListServiceConfigsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListServiceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_service_rollouts(self) -> Callable[ + [servicemanager.ListServiceRolloutsRequest], + servicemanager.ListServiceRolloutsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListServiceRollouts(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_services(self) -> Callable[ + [servicemanager.ListServicesRequest], + servicemanager.ListServicesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListServices(self._session, self._host, self._interceptor) # type: ignore + + @property + def submit_config_source(self) -> Callable[ + [servicemanager.SubmitConfigSourceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SubmitConfigSource(self._session, self._host, self._interceptor) # type: ignore + + @property + def undelete_service(self) -> Callable[ + [servicemanager.UndeleteServiceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UndeleteService(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(_BaseServiceManagerRestTransport._BaseGetIamPolicy, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = _BaseServiceManagerRestTransport._BaseGetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(_BaseServiceManagerRestTransport._BaseSetIamPolicy, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = _BaseServiceManagerRestTransport._BaseSetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(_BaseServiceManagerRestTransport._BaseTestIamPermissions, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: 
iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = _BaseServiceManagerRestTransport._BaseTestIamPermissions._get_http_options() + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseServiceManagerRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseServiceManagerRestTransport._BaseListOperations, ServiceManagerRestStub): + def __hash__(self): + return hash("ServiceManagerRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = _BaseServiceManagerRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseServiceManagerRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceManagerRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceManagerRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ServiceManagerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest_base.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest_base.py new file mode 100644 index 000000000000..5d495f355da3 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/services/service_manager/transports/rest_base.py @@ -0,0 +1,753 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import ServiceManagerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseServiceManagerRestTransport(ServiceManagerTransport): + """Base REST backend transport for ServiceManager. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'servicemanagement.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'servicemanagement.googleapis.com'). 
+ credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateService: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services', + 'body': 'service', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.CreateServiceRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseCreateService._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateServiceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}/configs', + 'body': 'service_config', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.CreateServiceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseServiceManagerRestTransport._BaseCreateServiceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateServiceRollout: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}/rollouts', + 'body': 'rollout', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.CreateServiceRolloutRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseCreateServiceRollout._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteService: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod 
+ def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/services/{service_name}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.DeleteServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseDeleteService._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGenerateConfigReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services:generateConfigReport', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.GenerateConfigReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, 
+ )) + query_params.update(_BaseServiceManagerRestTransport._BaseGenerateConfigReport._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetService: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services/{service_name}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.GetServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseGetService._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetServiceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services/{service_name}/configs/{config_id}', + }, + { + 'method': 'get', + 'uri': '/v1/services/{service_name}/config', + }, + ] + return http_options + + 
@staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.GetServiceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseGetServiceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetServiceRollout: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services/{service_name}/rollouts/{rollout_id}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.GetServiceRolloutRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseGetServiceRollout._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListServiceConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: 
Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services/{service_name}/configs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.ListServiceConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseListServiceConfigs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListServiceRollouts: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "filter" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services/{service_name}/rollouts', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.ListServiceRolloutsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + 
use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseListServiceRollouts._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListServices: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/services', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.ListServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSubmitConfigSource: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}/configs:submit', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.SubmitConfigSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseSubmitConfigSource._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUndeleteService: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/services/{service_name}:undelete', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = servicemanager.UndeleteServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseServiceManagerRestTransport._BaseUndeleteService._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=services/*}:getIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=services/*/consumers/*}:getIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=services/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=services/*/consumers/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=services/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=services/*/consumers/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseServiceManagerRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/__init__.py new file mode 100644 index 000000000000..9ea8b851e34d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + ChangeReport, + ConfigFile, + ConfigRef, + ConfigSource, + Diagnostic, + ManagedService, + OperationMetadata, + Rollout, +) +from .servicemanager import ( + CreateServiceConfigRequest, + CreateServiceRequest, + CreateServiceRolloutRequest, + DeleteServiceRequest, + EnableServiceResponse, + GenerateConfigReportRequest, + GenerateConfigReportResponse, + GetServiceConfigRequest, + GetServiceRequest, + GetServiceRolloutRequest, + ListServiceConfigsRequest, + ListServiceConfigsResponse, + ListServiceRolloutsRequest, + ListServiceRolloutsResponse, + ListServicesRequest, + ListServicesResponse, + SubmitConfigSourceRequest, + SubmitConfigSourceResponse, + UndeleteServiceRequest, + UndeleteServiceResponse, +) + +__all__ = ( + 'ChangeReport', + 'ConfigFile', + 'ConfigRef', + 'ConfigSource', + 'Diagnostic', + 'ManagedService', + 'OperationMetadata', + 'Rollout', + 'CreateServiceConfigRequest', + 'CreateServiceRequest', + 'CreateServiceRolloutRequest', + 'DeleteServiceRequest', + 'EnableServiceResponse', + 'GenerateConfigReportRequest', + 'GenerateConfigReportResponse', + 'GetServiceConfigRequest', + 'GetServiceRequest', + 'GetServiceRolloutRequest', + 'ListServiceConfigsRequest', + 'ListServiceConfigsResponse', + 'ListServiceRolloutsRequest', + 'ListServiceRolloutsResponse', + 'ListServicesRequest', + 'ListServicesResponse', + 'SubmitConfigSourceRequest', + 'SubmitConfigSourceResponse', + 'UndeleteServiceRequest', + 'UndeleteServiceResponse', +) diff --git 
a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/resources.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/resources.py new file mode 100644 index 000000000000..05a672471944 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/resources.py @@ -0,0 +1,490 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import config_change_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicemanagement.v1', + manifest={ + 'ManagedService', + 'OperationMetadata', + 'Diagnostic', + 'ConfigSource', + 'ConfigFile', + 'ConfigRef', + 'ChangeReport', + 'Rollout', + }, +) + + +class ManagedService(proto.Message): + r"""The full representation of a Service that is managed by + Google Service Management. + + Attributes: + service_name (str): + The name of the service. See the + `overview `__ + for naming requirements. + producer_project_id (str): + ID of the project that produces and owns this + service. 
+ """ + + service_name: str = proto.Field( + proto.STRING, + number=2, + ) + producer_project_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class OperationMetadata(proto.Message): + r"""The metadata associated with a long running operation + resource. + + Attributes: + resource_names (MutableSequence[str]): + The full name of the resources that this + operation is directly associated with. + steps (MutableSequence[google.cloud.servicemanagement_v1.types.OperationMetadata.Step]): + Detailed status information for each step. + The order is undetermined. + progress_percentage (int): + Percentage of completion of this operation, + ranging from 0 to 100. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of the operation. + """ + class Status(proto.Enum): + r"""Code describes the status of the operation (or one of its + steps). + + Values: + STATUS_UNSPECIFIED (0): + Unspecifed code. + DONE (1): + The operation or step has completed without + errors. + NOT_STARTED (2): + The operation or step has not started yet. + IN_PROGRESS (3): + The operation or step is in progress. + FAILED (4): + The operation or step has completed with + errors. If the operation is rollbackable, the + rollback completed with errors too. + CANCELLED (5): + The operation or step has completed with + cancellation. + """ + STATUS_UNSPECIFIED = 0 + DONE = 1 + NOT_STARTED = 2 + IN_PROGRESS = 3 + FAILED = 4 + CANCELLED = 5 + + class Step(proto.Message): + r"""Represents the status of one operation step. + + Attributes: + description (str): + The short description of the step. + status (google.cloud.servicemanagement_v1.types.OperationMetadata.Status): + The status code. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=2, + ) + status: 'OperationMetadata.Status' = proto.Field( + proto.ENUM, + number=4, + enum='OperationMetadata.Status', + ) + + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + steps: MutableSequence[Step] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Step, + ) + progress_percentage: int = proto.Field( + proto.INT32, + number=3, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Diagnostic(proto.Message): + r"""Represents a diagnostic message (error or warning) + + Attributes: + location (str): + File name and line number of the error or + warning. + kind (google.cloud.servicemanagement_v1.types.Diagnostic.Kind): + The kind of diagnostic information provided. + message (str): + Message describing the error or warning. + """ + class Kind(proto.Enum): + r"""The kind of diagnostic information possible. + + Values: + WARNING (0): + Warnings and errors + ERROR (1): + Only errors + """ + WARNING = 0 + ERROR = 1 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + kind: Kind = proto.Field( + proto.ENUM, + number=2, + enum=Kind, + ) + message: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ConfigSource(proto.Message): + r"""Represents a source file which is used to generate the service + configuration defined by ``google.api.Service``. + + Attributes: + id (str): + A unique ID for a specific instance of this + message, typically assigned by the client for + tracking purpose. If empty, the server may + choose to generate one instead. + files (MutableSequence[google.cloud.servicemanagement_v1.types.ConfigFile]): + Set of source configuration files that are used to generate + a service configuration (``google.api.Service``). 
+ """ + + id: str = proto.Field( + proto.STRING, + number=5, + ) + files: MutableSequence['ConfigFile'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ConfigFile', + ) + + +class ConfigFile(proto.Message): + r"""Generic specification of a source configuration file + + Attributes: + file_path (str): + The file name of the configuration file (full + or relative path). + file_contents (bytes): + The bytes that constitute the file. + file_type (google.cloud.servicemanagement_v1.types.ConfigFile.FileType): + The type of configuration file this + represents. + """ + class FileType(proto.Enum): + r""" + + Values: + FILE_TYPE_UNSPECIFIED (0): + Unknown file type. + SERVICE_CONFIG_YAML (1): + YAML-specification of service. + OPEN_API_JSON (2): + OpenAPI specification, serialized in JSON. + OPEN_API_YAML (3): + OpenAPI specification, serialized in YAML. + FILE_DESCRIPTOR_SET_PROTO (4): + FileDescriptorSet, generated by protoc. + + To generate, use protoc with imports and source info + included. For an example test.proto file, the following + command would put the value in a new file named out.pb. + + $protoc --include_imports --include_source_info test.proto + -o out.pb + PROTO_FILE (6): + Uncompiled Proto file. Used for storage and display purposes + only, currently server-side compilation is not supported. + Should match the inputs to 'protoc' command used to + generated FILE_DESCRIPTOR_SET_PROTO. A file of this type can + only be included if at least one file of type + FILE_DESCRIPTOR_SET_PROTO is included. 
+ """ + FILE_TYPE_UNSPECIFIED = 0 + SERVICE_CONFIG_YAML = 1 + OPEN_API_JSON = 2 + OPEN_API_YAML = 3 + FILE_DESCRIPTOR_SET_PROTO = 4 + PROTO_FILE = 6 + + file_path: str = proto.Field( + proto.STRING, + number=1, + ) + file_contents: bytes = proto.Field( + proto.BYTES, + number=3, + ) + file_type: FileType = proto.Field( + proto.ENUM, + number=4, + enum=FileType, + ) + + +class ConfigRef(proto.Message): + r"""Represents a service configuration with its name and id. + + Attributes: + name (str): + Resource name of a service config. It must + have the following format: "services/{service + name}/configs/{config id}". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ChangeReport(proto.Message): + r"""Change report associated with a particular service + configuration. + It contains a list of ConfigChanges based on the comparison + between two service configurations. + + Attributes: + config_changes (MutableSequence[google.api.config_change_pb2.ConfigChange]): + List of changes between two service configurations. The + changes will be alphabetically sorted based on the + identifier of each change. A ConfigChange identifier is a + dot separated path to the configuration. Example: + visibility.rules[selector='LibraryService.CreateBook'].restriction + """ + + config_changes: MutableSequence[config_change_pb2.ConfigChange] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=config_change_pb2.ConfigChange, + ) + + +class Rollout(proto.Message): + r"""A rollout resource that defines how service configuration + versions are pushed to control plane systems. Typically, you + create a new version of the service config, and then create a + Rollout to push the service config. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rollout_id (str): + Optional. Unique identifier of this Rollout. Must be no + longer than 63 characters and only lower case letters, + digits, '.', '_' and '-' are allowed. + + If not specified by client, the server will generate one. + The generated id will have the form of , where "date" is the + create date in ISO 8601 format. "revision number" is a + monotonically increasing positive number that is reset every + day for each service. An example of the generated rollout_id + is '2016-02-16r1' + create_time (google.protobuf.timestamp_pb2.Timestamp): + Creation time of the rollout. Readonly. + created_by (str): + The user who created the Rollout. Readonly. + status (google.cloud.servicemanagement_v1.types.Rollout.RolloutStatus): + The status of this rollout. Readonly. In case + of a failed rollout, the system will + automatically rollback to the current Rollout + version. Readonly. + traffic_percent_strategy (google.cloud.servicemanagement_v1.types.Rollout.TrafficPercentStrategy): + Google Service Control selects service + configurations based on traffic percentage. + + This field is a member of `oneof`_ ``strategy``. + delete_service_strategy (google.cloud.servicemanagement_v1.types.Rollout.DeleteServiceStrategy): + The strategy associated with a rollout to delete a + ``ManagedService``. Readonly. + + This field is a member of `oneof`_ ``strategy``. + service_name (str): + The name of the service associated with this + Rollout. + """ + class RolloutStatus(proto.Enum): + r"""Status of a Rollout. + + Values: + ROLLOUT_STATUS_UNSPECIFIED (0): + No status specified. + IN_PROGRESS (1): + The Rollout is in progress. + SUCCESS (2): + The Rollout has completed successfully. + CANCELLED (3): + The Rollout has been cancelled. This can + happen if you have overlapping Rollout pushes, + and the previous ones will be cancelled. 
+ FAILED (4): + The Rollout has failed and the rollback + attempt has failed too. + PENDING (5): + The Rollout has not started yet and is + pending for execution. + FAILED_ROLLED_BACK (6): + The Rollout has failed and rolled back to the + previous successful Rollout. + """ + ROLLOUT_STATUS_UNSPECIFIED = 0 + IN_PROGRESS = 1 + SUCCESS = 2 + CANCELLED = 3 + FAILED = 4 + PENDING = 5 + FAILED_ROLLED_BACK = 6 + + class TrafficPercentStrategy(proto.Message): + r"""Strategy that specifies how clients of Google Service Controller + want to send traffic to use different config versions. This is + generally used by API proxy to split traffic based on your + configured percentage for each config version. + + One example of how to gradually rollout a new service configuration + using this strategy: Day 1 + + :: + + Rollout { + id: "example.googleapis.com/rollout_20160206" + traffic_percent_strategy { + percentages: { + "example.googleapis.com/20160201": 70.00 + "example.googleapis.com/20160206": 30.00 + } + } + } + + Day 2 + + :: + + Rollout { + id: "example.googleapis.com/rollout_20160207" + traffic_percent_strategy: { + percentages: { + "example.googleapis.com/20160206": 100.00 + } + } + } + + Attributes: + percentages (MutableMapping[str, float]): + Maps service configuration IDs to their + corresponding traffic percentage. Key is the + service configuration ID, Value is the traffic + percentage which must be greater than 0.0 and + the sum must equal to 100.0. + """ + + percentages: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=1, + ) + + class DeleteServiceStrategy(proto.Message): + r"""Strategy used to delete a service. This strategy is a + placeholder only used by the system generated rollout to delete + a service. 
+ + """ + + rollout_id: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + created_by: str = proto.Field( + proto.STRING, + number=3, + ) + status: RolloutStatus = proto.Field( + proto.ENUM, + number=4, + enum=RolloutStatus, + ) + traffic_percent_strategy: TrafficPercentStrategy = proto.Field( + proto.MESSAGE, + number=5, + oneof='strategy', + message=TrafficPercentStrategy, + ) + delete_service_strategy: DeleteServiceStrategy = proto.Field( + proto.MESSAGE, + number=200, + oneof='strategy', + message=DeleteServiceStrategy, + ) + service_name: str = proto.Field( + proto.STRING, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/servicemanager.py b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/servicemanager.py new file mode 100644 index 000000000000..445cf6b0ca2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/google/cloud/servicemanagement_v1/types/servicemanager.py @@ -0,0 +1,576 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import service_pb2 # type: ignore +from google.cloud.servicemanagement_v1.types import resources +from google.protobuf import any_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.api.servicemanagement.v1', + manifest={ + 'ListServicesRequest', + 'ListServicesResponse', + 'GetServiceRequest', + 'CreateServiceRequest', + 'DeleteServiceRequest', + 'UndeleteServiceRequest', + 'UndeleteServiceResponse', + 'GetServiceConfigRequest', + 'ListServiceConfigsRequest', + 'ListServiceConfigsResponse', + 'CreateServiceConfigRequest', + 'SubmitConfigSourceRequest', + 'SubmitConfigSourceResponse', + 'CreateServiceRolloutRequest', + 'ListServiceRolloutsRequest', + 'ListServiceRolloutsResponse', + 'GetServiceRolloutRequest', + 'EnableServiceResponse', + 'GenerateConfigReportRequest', + 'GenerateConfigReportResponse', + }, +) + + +class ListServicesRequest(proto.Message): + r"""Request message for ``ListServices`` method. + + Attributes: + producer_project_id (str): + Include services produced by the specified + project. + page_size (int): + The max number of items to include in the + response list. Page size is 50 if not specified. + Maximum value is 500. + page_token (str): + Token identifying which result to start with; + returned by a previous list call. + consumer_id (str): + Include services consumed by the specified consumer. + + The Google Service Management implementation accepts the + following forms: + + - project: + """ + + producer_project_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + page_token: str = proto.Field( + proto.STRING, + number=6, + ) + consumer_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListServicesResponse(proto.Message): + r"""Response message for ``ListServices`` method. 
+ + Attributes: + services (MutableSequence[google.cloud.servicemanagement_v1.types.ManagedService]): + The returned services will only have the name + field set. + next_page_token (str): + Token that can be passed to ``ListServices`` to resume a + paginated query. + """ + + @property + def raw_page(self): + return self + + services: MutableSequence[resources.ManagedService] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.ManagedService, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetServiceRequest(proto.Message): + r"""Request message for ``GetService`` method. + + Attributes: + service_name (str): + Required. The name of the service. See the + ``ServiceManager`` overview for naming requirements. For + example: ``example.googleapis.com``. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateServiceRequest(proto.Message): + r"""Request message for CreateService method. + + Attributes: + service (google.cloud.servicemanagement_v1.types.ManagedService): + Required. Initial values for the service + resource. + """ + + service: resources.ManagedService = proto.Field( + proto.MESSAGE, + number=1, + message=resources.ManagedService, + ) + + +class DeleteServiceRequest(proto.Message): + r"""Request message for DeleteService method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeleteServiceRequest(proto.Message): + r"""Request message for UndeleteService method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. 
+ """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeleteServiceResponse(proto.Message): + r"""Response message for UndeleteService method. + + Attributes: + service (google.cloud.servicemanagement_v1.types.ManagedService): + Revived service resource. + """ + + service: resources.ManagedService = proto.Field( + proto.MESSAGE, + number=1, + message=resources.ManagedService, + ) + + +class GetServiceConfigRequest(proto.Message): + r"""Request message for GetServiceConfig method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + config_id (str): + Required. The id of the service configuration resource. + + This field must be specified for the server to return all + fields, including ``SourceInfo``. + view (google.cloud.servicemanagement_v1.types.GetServiceConfigRequest.ConfigView): + Specifies which parts of the Service Config + should be returned in the response. + """ + class ConfigView(proto.Enum): + r""" + + Values: + BASIC (0): + Server response includes all fields except + SourceInfo. + FULL (1): + Server response includes all fields including + SourceInfo. SourceFiles are of type + 'google.api.servicemanagement.v1.ConfigFile' and + are only available for configs created using the + SubmitConfigSource method. + """ + BASIC = 0 + FULL = 1 + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + config_id: str = proto.Field( + proto.STRING, + number=2, + ) + view: ConfigView = proto.Field( + proto.ENUM, + number=3, + enum=ConfigView, + ) + + +class ListServiceConfigsRequest(proto.Message): + r"""Request message for ListServiceConfigs method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + page_token (str): + The token of the page to retrieve. 
+ page_size (int): + The max number of items to include in the + response list. Page size is 50 if not specified. + Maximum value is 100. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListServiceConfigsResponse(proto.Message): + r"""Response message for ListServiceConfigs method. + + Attributes: + service_configs (MutableSequence[google.api.service_pb2.Service]): + The list of service configuration resources. + next_page_token (str): + The token of the next page of results. + """ + + @property + def raw_page(self): + return self + + service_configs: MutableSequence[service_pb2.Service] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=service_pb2.Service, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateServiceConfigRequest(proto.Message): + r"""Request message for CreateServiceConfig method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + service_config (google.api.service_pb2.Service): + Required. The service configuration resource. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + service_config: service_pb2.Service = proto.Field( + proto.MESSAGE, + number=2, + message=service_pb2.Service, + ) + + +class SubmitConfigSourceRequest(proto.Message): + r"""Request message for SubmitConfigSource method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + config_source (google.cloud.servicemanagement_v1.types.ConfigSource): + Required. The source configuration for the + service. + validate_only (bool): + Optional. 
If set, this will result in the generation of a + ``google.api.Service`` configuration based on the + ``ConfigSource`` provided, but the generated config and the + sources will NOT be persisted. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + config_source: resources.ConfigSource = proto.Field( + proto.MESSAGE, + number=2, + message=resources.ConfigSource, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SubmitConfigSourceResponse(proto.Message): + r"""Response message for SubmitConfigSource method. + + Attributes: + service_config (google.api.service_pb2.Service): + The generated service configuration. + """ + + service_config: service_pb2.Service = proto.Field( + proto.MESSAGE, + number=1, + message=service_pb2.Service, + ) + + +class CreateServiceRolloutRequest(proto.Message): + r"""Request message for 'CreateServiceRollout' + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + rollout (google.cloud.servicemanagement_v1.types.Rollout): + Required. The rollout resource. The ``service_name`` field + is output only. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + rollout: resources.Rollout = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Rollout, + ) + + +class ListServiceRolloutsRequest(proto.Message): + r"""Request message for 'ListServiceRollouts' + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + page_token (str): + The token of the page to retrieve. + page_size (int): + The max number of items to include in the + response list. Page size is 50 if not specified. + Maximum value is 100. + filter (str): + Required. Use ``filter`` to return subset of rollouts. 
The + following filters are supported: + + -- By [status] + [google.api.servicemanagement.v1.Rollout.RolloutStatus]. For + example, ``filter='status=SUCCESS'`` + + -- By [strategy] + [google.api.servicemanagement.v1.Rollout.strategy]. For + example, ``filter='strategy=TrafficPercentStrategy'`` + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListServiceRolloutsResponse(proto.Message): + r"""Response message for ListServiceRollouts method. + + Attributes: + rollouts (MutableSequence[google.cloud.servicemanagement_v1.types.Rollout]): + The list of rollout resources. + next_page_token (str): + The token of the next page of results. + """ + + @property + def raw_page(self): + return self + + rollouts: MutableSequence[resources.Rollout] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Rollout, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetServiceRolloutRequest(proto.Message): + r"""Request message for GetServiceRollout method. + + Attributes: + service_name (str): + Required. The name of the service. See the + `overview `__ + for naming requirements. For example: + ``example.googleapis.com``. + rollout_id (str): + Required. The id of the rollout resource. + """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + rollout_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class EnableServiceResponse(proto.Message): + r"""Operation payload for EnableService method. + """ + + +class GenerateConfigReportRequest(proto.Message): + r"""Request message for GenerateConfigReport method. + + Attributes: + new_config (google.protobuf.any_pb2.Any): + Required. Service configuration for which we want to + generate the report. 
For this version of API, the supported + types are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + old_config (google.protobuf.any_pb2.Any): + Optional. Service configuration against which the comparison + will be done. For this version of API, the supported types + are + [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + and [google.api.Service][google.api.Service] + """ + + new_config: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=1, + message=any_pb2.Any, + ) + old_config: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=2, + message=any_pb2.Any, + ) + + +class GenerateConfigReportResponse(proto.Message): + r"""Response message for GenerateConfigReport method. + + Attributes: + service_name (str): + Name of the service this report belongs to. + id (str): + ID of the service configuration this report + belongs to. + change_reports (MutableSequence[google.cloud.servicemanagement_v1.types.ChangeReport]): + list of ChangeReport, each corresponding to + comparison between two service configurations. + diagnostics (MutableSequence[google.cloud.servicemanagement_v1.types.Diagnostic]): + Errors / Linter warnings associated with the + service definition this report + belongs to. 
+ """ + + service_name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + change_reports: MutableSequence[resources.ChangeReport] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=resources.ChangeReport, + ) + diagnostics: MutableSequence[resources.Diagnostic] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=resources.Diagnostic, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-service-management/v1/mypy.ini b/owl-bot-staging/google-cloud-service-management/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-service-management/v1/noxfile.py b/owl-bot-staging/google-cloud-service-management/v1/noxfile.py new file mode 100644 index 000000000000..22d41d9f843e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os
import pathlib
import re
import shutil
import subprocess
import sys


import nox  # type: ignore

# All Python runtimes the unit tests are executed against.
ALL_PYTHON = [
    "3.7",
    "3.8",
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "3.13",
]

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
PACKAGE_NAME = 'google-cloud-service-management'

BLACK_VERSION = "black==22.3.0"
BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.13"

# Sessions run by default when `nox` is invoked without -s/--session.
# NOTE(review): nox reads the default list from `nox.options.sessions`;
# assigning to `nox.sessions` may be a no-op — confirm against the nox docs.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    # BUG FIX: a missing trailing comma after "check_lower_bounds" made
    # Python concatenate the two adjacent string literals into the single
    # nonexistent session name "check_lower_boundsdocs", silently dropping
    # both "check_lower_bounds" and "docs" from the default session list.
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
    "blacken",
    "lint",
    "prerelease_deps",
]

@nox.session(python=ALL_PYTHON)
@nox.parametrize(
    "protobuf_implementation",
    [ "python", "upb", "cpp" ],
)
def unit(session, protobuf_implementation):
    """Run the unit test suite.

    Parametrized over every supported Python version and over the three
    protobuf runtime implementations (pure python, upb, cpp).
    """

    # The 'cpp' protobuf runtime is not available for Python >= 3.11.
    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
        session.skip("cpp implementation is not supported in python 3.11+")

    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")

    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
    # The 'cpp' implementation requires Protobuf<4.
    if protobuf_implementation == "cpp":
        session.install("protobuf<4")

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/servicemanagement_v1/',
        '--cov=tests/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
        env={
            # Select the protobuf runtime implementation under test.
            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
        },
    )

@nox.session(python=ALL_PYTHON[-1])
@nox.parametrize(
    "protobuf_implementation",
    [ "python", "upb", "cpp" ],
)
def prerelease_deps(session, protobuf_implementation):
    """Run the unit test suite against pre-release versions of dependencies."""

    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
        session.skip("cpp implementation is not supported in python 3.11+")

    # Install test environment dependencies
    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')

    # Install the package without dependencies
    session.install('-e', '.', '--no-deps')

    # We test the minimum dependency versions using the minimum Python
    # version, so the lowest python runtime that we test has a corresponding
    # constraints file (e.g. `testing/constraints-3.7.txt`), which contains
    # all of the dependencies and extras.
    with open(
        CURRENT_DIRECTORY
        / "testing"
        / f"constraints-{ALL_PYTHON[0]}.txt",
        encoding="utf-8",
    ) as constraints_file:
        constraints_text = constraints_file.read()

    # Ignore leading whitespace and comment lines.
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/servicemanagement_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_async.py new file mode 100644 index 000000000000..fc2a7817911d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_create_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRequest( + ) + + # Make the request + operation = client.create_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateService_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_async.py new file mode 100644 index 000000000000..8704f8a996b4 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServiceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_create_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceConfigRequest( + service_name="service_name_value", + ) + + # Make the request + response = await client.create_service_config(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_sync.py new file mode 100644 index 000000000000..1c92619b85c7 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateServiceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_create_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceConfigRequest( + service_name="service_name_value", + ) + + # Make the request + response = client.create_service_config(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_async.py new file mode 100644 index 000000000000..2b6b9a83c219 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServiceRollout +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_create_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRolloutRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.create_service_rollout(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py new file mode 100644 index 000000000000..a9f3aff2d428 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for CreateServiceRollout +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_create_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRolloutRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.create_service_rollout(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_sync.py new file mode 100644 index 000000000000..00293a121ea9 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 
2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_CreateService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_create_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.CreateServiceRequest( + ) + + # Make the request + operation = client.create_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_CreateService_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_async.py new file mode 100644 index 000000000000..2aae72f239fb --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_DeleteService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_delete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.DeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.delete_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_DeleteService_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_sync.py new file mode 100644 index 000000000000..71e44452852c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_delete_service_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_DeleteService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_delete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.DeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.delete_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_DeleteService_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_async.py new file mode 100644 index 000000000000..e3e846c4ac1f --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateConfigReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_generate_config_report(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GenerateConfigReportRequest( + ) + + # Make the request + response = await client.generate_config_report(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_sync.py new file mode 100644 index 000000000000..f0b4255d857e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_generate_config_report_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateConfigReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_generate_config_report(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GenerateConfigReportRequest( + ) + + # Make the request + response = client.generate_config_report(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_async.py new file mode 100644 index 000000000000..f7218683f3c1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetService +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_get_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRequest( + service_name="service_name_value", + ) + + # Make the request + response = await client.get_service(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetService_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_async.py new file mode 100644 index 000000000000..d4b41e3a771d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServiceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetServiceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_get_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceConfigRequest( + service_name="service_name_value", + config_id="config_id_value", + ) + + # Make the request + response = await client.get_service_config(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetServiceConfig_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_sync.py new file mode 100644 index 000000000000..00b85c938ac9 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetServiceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetServiceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_get_service_config(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceConfigRequest( + service_name="service_name_value", + config_id="config_id_value", + ) + + # Make the request + response = client.get_service_config(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetServiceConfig_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_async.py new file mode 100644 index 000000000000..b16f66f1f8fe --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServiceRollout +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetServiceRollout_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_get_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRolloutRequest( + service_name="service_name_value", + rollout_id="rollout_id_value", + ) + + # Make the request + response = await client.get_service_rollout(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetServiceRollout_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py new file mode 100644 index 000000000000..97a265aa9797 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetServiceRollout +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetServiceRollout_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_get_service_rollout(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRolloutRequest( + service_name="service_name_value", + rollout_id="rollout_id_value", + ) + + # Make the request + response = client.get_service_rollout(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetServiceRollout_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_sync.py new file mode 100644 index 000000000000..94ad85b447ec --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_get_service_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_GetService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_get_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.GetServiceRequest( + service_name="service_name_value", + ) + + # Make the request + response = client.get_service(request=request) + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_GetService_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_async.py new file mode 100644 index 000000000000..38e56d2b4bf2 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServiceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_list_service_configs(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceConfigsRequest( + service_name="service_name_value", + ) + + # Make the request + page_result = client.list_service_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_sync.py new file mode 100644 index 000000000000..f43de63b4259 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServiceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_list_service_configs(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceConfigsRequest( + service_name="service_name_value", + ) + + # Make the request + page_result = client.list_service_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py new file mode 100644 index 000000000000..6054664614eb --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListServiceRollouts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_list_service_rollouts(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceRolloutsRequest( + service_name="service_name_value", + filter="filter_value", + ) + + # Make the request + page_result = client.list_service_rollouts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py new file mode 100644 index 000000000000..214715580014 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServiceRollouts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_list_service_rollouts(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServiceRolloutsRequest( + service_name="service_name_value", + filter="filter_value", + ) + + # Make the request + page_result = client.list_service_rollouts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_async.py new file mode 100644 index 000000000000..1c17457c3338 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServices_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_list_services(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServices_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_sync.py new file mode 100644 index 000000000000..a9c87f27e194 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_list_services_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_ListServices_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_list_services(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_ListServices_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_async.py new file mode 100644 index 000000000000..3f8fe02eed7e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitConfigSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_submit_config_source(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.SubmitConfigSourceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.submit_config_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_sync.py new file mode 100644 index 000000000000..96a61ef93c6d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_submit_config_source_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitConfigSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_submit_config_source(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.SubmitConfigSourceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.submit_config_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_async.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_async.py new file mode 100644 index 000000000000..ec9010ce7767 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UndeleteService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_UndeleteService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +async def sample_undelete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerAsyncClient() + + # Initialize request argument(s) + request = servicemanagement_v1.UndeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.undelete_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_UndeleteService_async] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_sync.py b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_sync.py new file mode 100644 index 000000000000..6e3f000fb8cc --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/servicemanagement_v1_generated_service_manager_undelete_service_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-management + + +# [START servicemanagement_v1_generated_ServiceManager_UndeleteService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicemanagement_v1 + + +def sample_undelete_service(): + # Create a client + client = servicemanagement_v1.ServiceManagerClient() + + # Initialize request argument(s) + request = servicemanagement_v1.UndeleteServiceRequest( + service_name="service_name_value", + ) + + # Make the request + operation = client.undelete_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END servicemanagement_v1_generated_ServiceManager_UndeleteService_sync] diff --git a/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json new file mode 100644 index 000000000000..8b514f320127 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json @@ -0,0 +1,2188 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.servicemanagement.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-service-management", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.create_service_config", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateServiceConfig", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + 
"shortName": "CreateServiceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.CreateServiceConfigRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "service_config", + "type": "google.api.service_pb2.Service" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.service_pb2.Service", + "shortName": "create_service_config" + }, + "description": "Sample for CreateServiceConfig", + "file": "servicemanagement_v1_generated_service_manager_create_service_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.create_service_config", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateServiceConfig", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "CreateServiceConfig" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.servicemanagement_v1.types.CreateServiceConfigRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "service_config", + "type": "google.api.service_pb2.Service" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.service_pb2.Service", + "shortName": "create_service_config" + }, + "description": "Sample for CreateServiceConfig", + "file": "servicemanagement_v1_generated_service_manager_create_service_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateServiceConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.create_service_rollout", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "CreateServiceRollout" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.CreateServiceRolloutRequest" + }, + { + "name": "service_name", + "type": 
"str" + }, + { + "name": "rollout", + "type": "google.cloud.servicemanagement_v1.types.Rollout" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_service_rollout" + }, + "description": "Sample for CreateServiceRollout", + "file": "servicemanagement_v1_generated_service_manager_create_service_rollout_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_rollout_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.create_service_rollout", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "CreateServiceRollout" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.CreateServiceRolloutRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "rollout", + "type": "google.cloud.servicemanagement_v1.types.Rollout" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_service_rollout" + }, + "description": "Sample for CreateServiceRollout", + "file": "servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateServiceRollout_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_rollout_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.create_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "CreateService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.CreateServiceRequest" + }, + { + "name": "service", + "type": "google.cloud.servicemanagement_v1.types.ManagedService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_service" + }, + "description": "Sample for CreateService", + "file": "servicemanagement_v1_generated_service_manager_create_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateService_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.create_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.CreateService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "CreateService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.CreateServiceRequest" + }, + { + "name": "service", + "type": "google.cloud.servicemanagement_v1.types.ManagedService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_service" + }, + "description": "Sample for CreateService", + "file": 
"servicemanagement_v1_generated_service_manager_create_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_CreateService_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_create_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.delete_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.DeleteService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "DeleteService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.DeleteServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_service" + }, + "description": "Sample for DeleteService", + "file": "servicemanagement_v1_generated_service_manager_delete_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_DeleteService_async", + "segments": [ + { + "end": 
55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_delete_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.delete_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.DeleteService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "DeleteService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.DeleteServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_service" + }, + "description": "Sample for DeleteService", + "file": "servicemanagement_v1_generated_service_manager_delete_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_DeleteService_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_delete_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.generate_config_report", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GenerateConfigReport", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GenerateConfigReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GenerateConfigReportRequest" + }, + { + "name": "new_config", + "type": "google.protobuf.any_pb2.Any" + }, + { + "name": "old_config", + "type": "google.protobuf.any_pb2.Any" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.GenerateConfigReportResponse", + "shortName": "generate_config_report" + }, + "description": "Sample for GenerateConfigReport", + "file": "servicemanagement_v1_generated_service_manager_generate_config_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_generate_config_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.generate_config_report", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GenerateConfigReport", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GenerateConfigReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GenerateConfigReportRequest" + }, + { + "name": "new_config", + "type": "google.protobuf.any_pb2.Any" + }, + { + "name": "old_config", + "type": "google.protobuf.any_pb2.Any" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.GenerateConfigReportResponse", + "shortName": "generate_config_report" + }, + "description": "Sample for GenerateConfigReport", + "file": "servicemanagement_v1_generated_service_manager_generate_config_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GenerateConfigReport_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"servicemanagement_v1_generated_service_manager_generate_config_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.get_service_config", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GetServiceConfig", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetServiceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GetServiceConfigRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "config_id", + "type": "str" + }, + { + "name": "view", + "type": "google.cloud.servicemanagement_v1.types.GetServiceConfigRequest.ConfigView" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.service_pb2.Service", + "shortName": "get_service_config" + }, + "description": "Sample for GetServiceConfig", + "file": "servicemanagement_v1_generated_service_manager_get_service_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetServiceConfig_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"servicemanagement_v1_generated_service_manager_get_service_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.get_service_config", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GetServiceConfig", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetServiceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GetServiceConfigRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "config_id", + "type": "str" + }, + { + "name": "view", + "type": "google.cloud.servicemanagement_v1.types.GetServiceConfigRequest.ConfigView" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.service_pb2.Service", + "shortName": "get_service_config" + }, + "description": "Sample for GetServiceConfig", + "file": "servicemanagement_v1_generated_service_manager_get_service_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetServiceConfig_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_get_service_config_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.get_service_rollout", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GetServiceRollout", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetServiceRollout" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GetServiceRolloutRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "rollout_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.Rollout", + "shortName": "get_service_rollout" + }, + "description": "Sample for GetServiceRollout", + "file": "servicemanagement_v1_generated_service_manager_get_service_rollout_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetServiceRollout_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_get_service_rollout_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": 
"ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.get_service_rollout", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GetServiceRollout", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetServiceRollout" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GetServiceRolloutRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "rollout_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.Rollout", + "shortName": "get_service_rollout" + }, + "description": "Sample for GetServiceRollout", + "file": "servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetServiceRollout_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_get_service_rollout_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.get_service", + "method": { + "fullName": 
"google.api.servicemanagement.v1.ServiceManager.GetService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.GetServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.ManagedService", + "shortName": "get_service" + }, + "description": "Sample for GetService", + "file": "servicemanagement_v1_generated_service_manager_get_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetService_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_get_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.get_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.GetService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "GetService" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.servicemanagement_v1.types.GetServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.types.ManagedService", + "shortName": "get_service" + }, + "description": "Sample for GetService", + "file": "servicemanagement_v1_generated_service_manager_get_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_GetService_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_get_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.list_service_configs", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServiceConfigs", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServiceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" 
+ }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceConfigsAsyncPager", + "shortName": "list_service_configs" + }, + "description": "Sample for ListServiceConfigs", + "file": "servicemanagement_v1_generated_service_manager_list_service_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_service_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.list_service_configs", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServiceConfigs", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServiceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServiceConfigsRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceConfigsPager", + "shortName": "list_service_configs" + }, + "description": "Sample for ListServiceConfigs", + "file": "servicemanagement_v1_generated_service_manager_list_service_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServiceConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_service_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.list_service_rollouts", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServiceRollouts", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServiceRollouts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceRolloutsAsyncPager", 
+ "shortName": "list_service_rollouts" + }, + "description": "Sample for ListServiceRollouts", + "file": "servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_service_rollouts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.list_service_rollouts", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServiceRollouts", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServiceRollouts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServiceRolloutsRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServiceRolloutsPager", + "shortName": "list_service_rollouts" + }, + "description": "Sample for ListServiceRollouts", + "file": 
"servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServiceRollouts_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_service_rollouts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.list_services", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServices", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServicesRequest" + }, + { + "name": "producer_project_id", + "type": "str" + }, + { + "name": "consumer_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServicesAsyncPager", + "shortName": "list_services" + }, + "description": "Sample for ListServices", + "file": "servicemanagement_v1_generated_service_manager_list_services_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", 
+ "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServices_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_services_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.list_services", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.ListServices", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "ListServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.ListServicesRequest" + }, + { + "name": "producer_project_id", + "type": "str" + }, + { + "name": "consumer_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicemanagement_v1.services.service_manager.pagers.ListServicesPager", + "shortName": "list_services" + }, + "description": "Sample for ListServices", + "file": "servicemanagement_v1_generated_service_manager_list_services_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_ListServices_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_list_services_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.submit_config_source", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.SubmitConfigSource", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "SubmitConfigSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.SubmitConfigSourceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "config_source", + "type": "google.cloud.servicemanagement_v1.types.ConfigSource" + }, + { + "name": "validate_only", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "submit_config_source" + }, + "description": "Sample for SubmitConfigSource", + "file": "servicemanagement_v1_generated_service_manager_submit_config_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_submit_config_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.submit_config_source", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.SubmitConfigSource", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "SubmitConfigSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.SubmitConfigSourceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "config_source", + "type": "google.cloud.servicemanagement_v1.types.ConfigSource" + }, + { + "name": "validate_only", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "submit_config_source" + }, + "description": "Sample for SubmitConfigSource", + "file": "servicemanagement_v1_generated_service_manager_submit_config_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_SubmitConfigSource_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_submit_config_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient", + "shortName": "ServiceManagerAsyncClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerAsyncClient.undelete_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.UndeleteService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "UndeleteService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.UndeleteServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "undelete_service" + }, + "description": "Sample for UndeleteService", + "file": "servicemanagement_v1_generated_service_manager_undelete_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_UndeleteService_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"servicemanagement_v1_generated_service_manager_undelete_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient", + "shortName": "ServiceManagerClient" + }, + "fullName": "google.cloud.servicemanagement_v1.ServiceManagerClient.undelete_service", + "method": { + "fullName": "google.api.servicemanagement.v1.ServiceManager.UndeleteService", + "service": { + "fullName": "google.api.servicemanagement.v1.ServiceManager", + "shortName": "ServiceManager" + }, + "shortName": "UndeleteService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicemanagement_v1.types.UndeleteServiceRequest" + }, + { + "name": "service_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "undelete_service" + }, + "description": "Sample for UndeleteService", + "file": "servicemanagement_v1_generated_service_manager_undelete_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicemanagement_v1_generated_ServiceManager_UndeleteService_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicemanagement_v1_generated_service_manager_undelete_service_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-service-management/v1/scripts/fixup_servicemanagement_v1_keywords.py 
b/owl-bot-staging/google-cloud-service-management/v1/scripts/fixup_servicemanagement_v1_keywords.py new file mode 100644 index 000000000000..319d760b1b39 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/scripts/fixup_servicemanagement_v1_keywords.py @@ -0,0 +1,188 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class servicemanagementCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_service': ('service', ), + 'create_service_config': ('service_name', 'service_config', ), + 'create_service_rollout': ('service_name', 'rollout', ), + 'delete_service': ('service_name', ), + 'generate_config_report': ('new_config', 'old_config', ), + 'get_service': ('service_name', ), + 'get_service_config': ('service_name', 'config_id', 'view', ), + 'get_service_rollout': ('service_name', 'rollout_id', ), + 'list_service_configs': 
('service_name', 'page_token', 'page_size', ), + 'list_service_rollouts': ('service_name', 'filter', 'page_token', 'page_size', ), + 'list_services': ('producer_project_id', 'page_size', 'page_token', 'consumer_id', ), + 'submit_config_source': ('service_name', 'config_source', 'validate_only', ), + 'undelete_service': ('service_name', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=servicemanagementCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the servicemanagement client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-service-management/v1/setup.py b/owl-bot-staging/google-cloud-service-management/v1/setup.py new file mode 100644 index 000000000000..a1a6aff87821 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/setup.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-service-management' + + +description = "Google Cloud Service Management API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/servicemanagement/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI 
Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..a81fb6bcd05c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-service-management/v1/tests/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-management/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/__init__.py b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/test_service_manager.py new file mode 100644 index 000000000000..05e5e81d7b13 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-management/v1/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -0,0 +1,11683 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api import auth_pb2 # type: ignore +from google.api import backend_pb2 # type: ignore +from google.api import billing_pb2 # type: ignore +from google.api import client_pb2 # type: ignore +from google.api import context_pb2 # type: ignore +from google.api import control_pb2 # type: ignore +from google.api import documentation_pb2 # type: ignore +from google.api import endpoint_pb2 # type: ignore +from google.api import http_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import log_pb2 # type: ignore +from google.api import logging_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.api import monitoring_pb2 # type: ignore +from google.api import policy_pb2 # type: ignore +from google.api import quota_pb2 # type: ignore +from google.api import service_pb2 # type: ignore +from google.api import source_info_pb2 # type: ignore +from google.api import system_parameter_pb2 # type: ignore +from google.api 
import usage_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.servicemanagement_v1.services.service_manager import ServiceManagerAsyncClient +from google.cloud.servicemanagement_v1.services.service_manager import ServiceManagerClient +from google.cloud.servicemanagement_v1.services.service_manager import pagers +from google.cloud.servicemanagement_v1.services.service_manager import transports +from google.cloud.servicemanagement_v1.types import resources +from google.cloud.servicemanagement_v1.types import servicemanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import api_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import source_context_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import type_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] 
+ yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceManagerClient._get_default_mtls_endpoint(None) is None + assert ServiceManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ServiceManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ServiceManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ServiceManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ServiceManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert ServiceManagerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ServiceManagerClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ServiceManagerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + ServiceManagerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ServiceManagerClient._read_environment_variables() == (False, "never", None) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ServiceManagerClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ServiceManagerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceManagerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ServiceManagerClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ServiceManagerClient._get_client_cert_source(None, False) is None + assert ServiceManagerClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert ServiceManagerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert ServiceManagerClient._get_client_cert_source(None, True) is mock_default_cert_source + assert ServiceManagerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(ServiceManagerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerClient)) +@mock.patch.object(ServiceManagerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + 
mock_client_cert_source = mock.Mock() + default_universe = ServiceManagerClient._DEFAULT_UNIVERSE + default_endpoint = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert ServiceManagerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert ServiceManagerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ServiceManagerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceManagerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert ServiceManagerClient._get_api_endpoint(None, None, default_universe, "always") == ServiceManagerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceManagerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ServiceManagerClient.DEFAULT_MTLS_ENDPOINT + assert ServiceManagerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert ServiceManagerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceManagerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ServiceManagerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ServiceManagerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ServiceManagerClient._get_universe_domain(None, None) == ServiceManagerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ServiceManagerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceManagerClient, "grpc"), + (ServiceManagerAsyncClient, "grpc_asyncio"), + (ServiceManagerClient, "rest"), +]) +def test_service_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicemanagement.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicemanagement.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceManagerGrpcTransport, "grpc"), + (transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ServiceManagerRestTransport, "rest"), +]) +def test_service_manager_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, 
always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceManagerClient, "grpc"), + (ServiceManagerAsyncClient, "grpc_asyncio"), + (ServiceManagerClient, "rest"), +]) +def test_service_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'servicemanagement.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://servicemanagement.googleapis.com' + ) + + +def test_service_manager_client_get_transport_class(): + transport = ServiceManagerClient.get_transport_class() + available_transports = [ + transports.ServiceManagerGrpcTransport, + transports.ServiceManagerRestTransport, + ] + assert transport in available_transports + + transport = ServiceManagerClient.get_transport_class("grpc") + assert transport == transports.ServiceManagerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc"), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, 
"grpc_asyncio"), + (ServiceManagerClient, transports.ServiceManagerRestTransport, "rest"), +]) +@mock.patch.object(ServiceManagerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerClient)) +@mock.patch.object(ServiceManagerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerAsyncClient)) +def test_service_manager_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceManagerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceManagerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc", "true"), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc", 
"false"), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ServiceManagerClient, transports.ServiceManagerRestTransport, "rest", "true"), + (ServiceManagerClient, transports.ServiceManagerRestTransport, "rest", "false"), +]) +@mock.patch.object(ServiceManagerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerClient)) +@mock.patch.object(ServiceManagerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ServiceManagerClient, ServiceManagerAsyncClient +]) +@mock.patch.object(ServiceManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceManagerClient)) +@mock.patch.object(ServiceManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceManagerAsyncClient)) +def test_service_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ServiceManagerClient, ServiceManagerAsyncClient +]) +@mock.patch.object(ServiceManagerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerClient)) +@mock.patch.object(ServiceManagerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceManagerAsyncClient)) +def test_service_manager_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ServiceManagerClient._DEFAULT_UNIVERSE + default_endpoint = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe 
= "bar.com" + mock_endpoint = ServiceManagerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc"), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceManagerClient, transports.ServiceManagerRestTransport, "rest"), +]) +def test_service_manager_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc", grpc_helpers), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ServiceManagerClient, transports.ServiceManagerRestTransport, "rest", None), +]) +def test_service_manager_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_service_manager_client_client_options_from_dict(): + with mock.patch('google.cloud.servicemanagement_v1.services.service_manager.transports.ServiceManagerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ServiceManagerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport, "grpc", grpc_helpers), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_service_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "servicemanagement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + 'https://www.googleapis.com/auth/service.management.readonly', +), + scopes=None, + default_host="servicemanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.ListServicesRequest, + dict, +]) +def test_list_services(request_type, transport: str = 'grpc'): + client = 
ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServicesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_services_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.ListServicesRequest( + producer_project_id='producer_project_id_value', + page_token='page_token_value', + consumer_id='consumer_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_services(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.ListServicesRequest( + producer_project_id='producer_project_id_value', + page_token='page_token_value', + consumer_id='consumer_id_value', + ) + +def test_list_services_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_services] = mock_rpc + request = {} + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_services_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_services in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_services] = mock_rpc + + request = {} + await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_services_async(transport: str = 'grpc_asyncio', request_type=servicemanager.ListServicesRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServicesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_services_async_from_dict(): + await test_list_services_async(request_type=dict) + + +def test_list_services_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServicesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_services( + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].producer_project_id + mock_val = 'producer_project_id_value' + assert arg == mock_val + arg = args[0].consumer_id + mock_val = 'consumer_id_value' + assert arg == mock_val + + +def test_list_services_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_services( + servicemanager.ListServicesRequest(), + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + +@pytest.mark.asyncio +async def test_list_services_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServicesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServicesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_services( + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].producer_project_id + mock_val = 'producer_project_id_value' + assert arg == mock_val + arg = args[0].consumer_id + mock_val = 'consumer_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_services_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_services( + servicemanager.ListServicesRequest(), + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + + +def test_list_services_pager(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + resources.ManagedService(), + ], + next_page_token='abc', + ), + servicemanager.ListServicesResponse( + services=[], + next_page_token='def', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + ], + next_page_token='ghi', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_services(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ManagedService) + for i in results) +def test_list_services_pages(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + resources.ManagedService(), + ], + next_page_token='abc', + ), + servicemanager.ListServicesResponse( + services=[], + next_page_token='def', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + ], + next_page_token='ghi', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + ], + ), + RuntimeError, + ) + pages = list(client.list_services(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_services_async_pager(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + resources.ManagedService(), + ], + next_page_token='abc', + ), + servicemanager.ListServicesResponse( + services=[], + next_page_token='def', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + ], + next_page_token='ghi', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_services(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.ManagedService) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_services_async_pages(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + resources.ManagedService(), + ], + next_page_token='abc', + ), + servicemanager.ListServicesResponse( + services=[], + next_page_token='def', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + ], + next_page_token='ghi', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_services(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + servicemanager.GetServiceRequest, + dict, +]) +def test_get_service(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ManagedService( + service_name='service_name_value', + producer_project_id='producer_project_id_value', + ) + response = client.get_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ManagedService) + assert response.service_name == 'service_name_value' + assert response.producer_project_id == 'producer_project_id_value' + + +def test_get_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.GetServiceRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.GetServiceRequest( + service_name='service_name_value', + ) + +def test_get_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service] = mock_rpc + request = {} + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_service] = mock_rpc + + request = {} + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_async(transport: str = 'grpc_asyncio', request_type=servicemanager.GetServiceRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ManagedService( + service_name='service_name_value', + producer_project_id='producer_project_id_value', + )) + response = await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ManagedService) + assert response.service_name == 'service_name_value' + assert response.producer_project_id == 'producer_project_id_value' + + +@pytest.mark.asyncio +async def test_get_service_async_from_dict(): + await test_get_service_async(request_type=dict) + +def test_get_service_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = resources.ManagedService() + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ManagedService()) + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_get_service_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ManagedService() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + + +def test_get_service_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_service( + servicemanager.GetServiceRequest(), + service_name='service_name_value', + ) + +@pytest.mark.asyncio +async def test_get_service_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ManagedService() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ManagedService()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_service_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_service( + servicemanager.GetServiceRequest(), + service_name='service_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.CreateServiceRequest, + dict, +]) +def test_create_service(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.CreateServiceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.CreateServiceRequest( + ) + +def test_create_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_service] = mock_rpc + request = {} + client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_service] = mock_rpc + + request = {} + await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_async(transport: str = 'grpc_asyncio', request_type=servicemanager.CreateServiceRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_service_async_from_dict(): + await test_create_service_async(request_type=dict) + + +def test_create_service_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_service( + service=resources.ManagedService(service_name='service_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service + mock_val = resources.ManagedService(service_name='service_name_value') + assert arg == mock_val + + +def test_create_service_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_service( + servicemanager.CreateServiceRequest(), + service=resources.ManagedService(service_name='service_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_service_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_service( + service=resources.ManagedService(service_name='service_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service + mock_val = resources.ManagedService(service_name='service_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_service_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_service( + servicemanager.CreateServiceRequest(), + service=resources.ManagedService(service_name='service_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.DeleteServiceRequest, + dict, +]) +def test_delete_service(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.DeleteServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.DeleteServiceRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.DeleteServiceRequest( + service_name='service_name_value', + ) + +def test_delete_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_service] = mock_rpc + request = {} + client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_service] = mock_rpc + + request = {} + await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_service_async(transport: str = 'grpc_asyncio', request_type=servicemanager.DeleteServiceRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.DeleteServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_service_async_from_dict(): + await test_delete_service_async(request_type=dict) + +def test_delete_service_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.DeleteServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_service_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.DeleteServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_delete_service_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + + +def test_delete_service_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_service( + servicemanager.DeleteServiceRequest(), + service_name='service_name_value', + ) + +@pytest.mark.asyncio +async def test_delete_service_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_service_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_service( + servicemanager.DeleteServiceRequest(), + service_name='service_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.UndeleteServiceRequest, + dict, +]) +def test_undelete_service(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.UndeleteServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_undelete_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.UndeleteServiceRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.undelete_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.UndeleteServiceRequest( + service_name='service_name_value', + ) + +def test_undelete_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.undelete_service] = mock_rpc + request = {} + client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undelete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_undelete_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.undelete_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.undelete_service] = mock_rpc + + request = {} + await client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.undelete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_undelete_service_async(transport: str = 'grpc_asyncio', request_type=servicemanager.UndeleteServiceRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.UndeleteServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_undelete_service_async_from_dict(): + await test_undelete_service_async(request_type=dict) + +def test_undelete_service_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.UndeleteServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_undelete_service_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.UndeleteServiceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_undelete_service_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.undelete_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + + +def test_undelete_service_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.undelete_service( + servicemanager.UndeleteServiceRequest(), + service_name='service_name_value', + ) + +@pytest.mark.asyncio +async def test_undelete_service_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.undelete_service( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_undelete_service_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.undelete_service( + servicemanager.UndeleteServiceRequest(), + service_name='service_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.ListServiceConfigsRequest, + dict, +]) +def test_list_service_configs(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServiceConfigsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServiceConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServiceConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_service_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.ListServiceConfigsRequest( + service_name='service_name_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_service_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.ListServiceConfigsRequest( + service_name='service_name_value', + page_token='page_token_value', + ) + +def test_list_service_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_service_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_service_configs] = mock_rpc + request = {} + client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_service_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_service_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_service_configs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_service_configs] = mock_rpc + + request = {} + await client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_service_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_service_configs_async(transport: str = 'grpc_asyncio', request_type=servicemanager.ListServiceConfigsRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceConfigsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServiceConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServiceConfigsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_service_configs_async_from_dict(): + await test_list_service_configs_async(request_type=dict) + +def test_list_service_configs_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.ListServiceConfigsRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + call.return_value = servicemanager.ListServiceConfigsResponse() + client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_service_configs_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.ListServiceConfigsRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceConfigsResponse()) + await client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_list_service_configs_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServiceConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_service_configs( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + + +def test_list_service_configs_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_service_configs( + servicemanager.ListServiceConfigsRequest(), + service_name='service_name_value', + ) + +@pytest.mark.asyncio +async def test_list_service_configs_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServiceConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceConfigsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_service_configs( + service_name='service_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_service_configs_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_service_configs( + servicemanager.ListServiceConfigsRequest(), + service_name='service_name_value', + ) + + +def test_list_service_configs_pager(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + service_pb2.Service(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[], + next_page_token='def', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('service_name', ''), + )), + ) + pager = client.list_service_configs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service_pb2.Service) + for i in results) +def test_list_service_configs_pages(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + service_pb2.Service(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[], + next_page_token='def', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + ], + ), + RuntimeError, + ) + pages = list(client.list_service_configs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_service_configs_async_pager(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + service_pb2.Service(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[], + next_page_token='def', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_service_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, service_pb2.Service) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_service_configs_async_pages(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + service_pb2.Service(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[], + next_page_token='def', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_service_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + servicemanager.GetServiceConfigRequest, + dict, +]) +def test_get_service_config(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + ) + response = client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_pb2.Service) + assert response.name == 'name_value' + assert response.title == 'title_value' + assert response.producer_project_id == 'producer_project_id_value' + assert response.id == 'id_value' + + +def test_get_service_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.GetServiceConfigRequest( + service_name='service_name_value', + config_id='config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_service_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.GetServiceConfigRequest( + service_name='service_name_value', + config_id='config_id_value', + ) + +def test_get_service_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service_config] = mock_rpc + request = {} + client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_service_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_service_config] = mock_rpc + + request = {} + await client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_config_async(transport: str = 'grpc_asyncio', request_type=servicemanager.GetServiceConfigRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + )) + response = await client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_pb2.Service) + assert response.name == 'name_value' + assert response.title == 'title_value' + assert response.producer_project_id == 'producer_project_id_value' + assert response.id == 'id_value' + + +@pytest.mark.asyncio +async def test_get_service_config_async_from_dict(): + await test_get_service_config_async(request_type=dict) + +def test_get_service_config_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceConfigRequest() + + request.service_name = 'service_name_value' + request.config_id = 'config_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + call.return_value = service_pb2.Service() + client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value&config_id=config_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_config_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceConfigRequest() + + request.service_name = 'service_name_value' + request.config_id = 'config_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service()) + await client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value&config_id=config_id_value', + ) in kw['metadata'] + + +def test_get_service_config_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_service_config( + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].config_id + mock_val = 'config_id_value' + assert arg == mock_val + arg = args[0].view + mock_val = servicemanager.GetServiceConfigRequest.ConfigView.FULL + assert arg == mock_val + + +def test_get_service_config_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_service_config( + servicemanager.GetServiceConfigRequest(), + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + +@pytest.mark.asyncio +async def test_get_service_config_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_service_config( + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].config_id + mock_val = 'config_id_value' + assert arg == mock_val + arg = args[0].view + mock_val = servicemanager.GetServiceConfigRequest.ConfigView.FULL + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_service_config_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_service_config( + servicemanager.GetServiceConfigRequest(), + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.CreateServiceConfigRequest, + dict, +]) +def test_create_service_config(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + ) + response = client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_pb2.Service) + assert response.name == 'name_value' + assert response.title == 'title_value' + assert response.producer_project_id == 'producer_project_id_value' + assert response.id == 'id_value' + + +def test_create_service_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.CreateServiceConfigRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_service_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.CreateServiceConfigRequest( + service_name='service_name_value', + ) + +def test_create_service_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_service_config] = mock_rpc + request = {} + client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_service_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_service_config] = mock_rpc + + request = {} + await client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_config_async(transport: str = 'grpc_asyncio', request_type=servicemanager.CreateServiceConfigRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + )) + response = await client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service_pb2.Service) + assert response.name == 'name_value' + assert response.title == 'title_value' + assert response.producer_project_id == 'producer_project_id_value' + assert response.id == 'id_value' + + +@pytest.mark.asyncio +async def test_create_service_config_async_from_dict(): + await test_create_service_config_async(request_type=dict) + +def test_create_service_config_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.CreateServiceConfigRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + call.return_value = service_pb2.Service() + client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_service_config_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.CreateServiceConfigRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service()) + await client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_create_service_config_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_service_config( + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].service_config + mock_val = service_pb2.Service(name='name_value') + assert arg == mock_val + + +def test_create_service_config_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_service_config( + servicemanager.CreateServiceConfigRequest(), + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_service_config_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service_pb2.Service() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_service_config( + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].service_config + mock_val = service_pb2.Service(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_service_config_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_service_config( + servicemanager.CreateServiceConfigRequest(), + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.SubmitConfigSourceRequest, + dict, +]) +def test_submit_config_source(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.SubmitConfigSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_submit_config_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.SubmitConfigSourceRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.submit_config_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.SubmitConfigSourceRequest( + service_name='service_name_value', + ) + +def test_submit_config_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_config_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.submit_config_source] = mock_rpc + request = {} + client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.submit_config_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_submit_config_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.submit_config_source in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.submit_config_source] = mock_rpc + + request = {} + await client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.submit_config_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_submit_config_source_async(transport: str = 'grpc_asyncio', request_type=servicemanager.SubmitConfigSourceRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.SubmitConfigSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_submit_config_source_async_from_dict(): + await test_submit_config_source_async(request_type=dict) + +def test_submit_config_source_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.SubmitConfigSourceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_submit_config_source_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.SubmitConfigSourceRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_submit_config_source_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.submit_config_source( + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].config_source + mock_val = resources.ConfigSource(id='id_value') + assert arg == mock_val + arg = args[0].validate_only + mock_val = True + assert arg == mock_val + + +def test_submit_config_source_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_config_source( + servicemanager.SubmitConfigSourceRequest(), + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + +@pytest.mark.asyncio +async def test_submit_config_source_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.submit_config_source( + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].config_source + mock_val = resources.ConfigSource(id='id_value') + assert arg == mock_val + arg = args[0].validate_only + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_submit_config_source_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.submit_config_source( + servicemanager.SubmitConfigSourceRequest(), + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.ListServiceRolloutsRequest, + dict, +]) +def test_list_service_rollouts(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = servicemanager.ListServiceRolloutsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServiceRolloutsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServiceRolloutsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_service_rollouts_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.ListServiceRolloutsRequest( + service_name='service_name_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_service_rollouts(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.ListServiceRolloutsRequest( + service_name='service_name_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_service_rollouts_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_service_rollouts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_service_rollouts] = mock_rpc + request = {} + client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_service_rollouts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_service_rollouts_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_service_rollouts in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_service_rollouts] = mock_rpc + + request = {} + await client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_service_rollouts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_service_rollouts_async(transport: str = 'grpc_asyncio', request_type=servicemanager.ListServiceRolloutsRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceRolloutsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.ListServiceRolloutsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServiceRolloutsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_service_rollouts_async_from_dict(): + await test_list_service_rollouts_async(request_type=dict) + +def test_list_service_rollouts_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.ListServiceRolloutsRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + call.return_value = servicemanager.ListServiceRolloutsResponse() + client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_service_rollouts_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.ListServiceRolloutsRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceRolloutsResponse()) + await client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_list_service_rollouts_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServiceRolloutsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_service_rollouts( + service_name='service_name_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_list_service_rollouts_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_service_rollouts( + servicemanager.ListServiceRolloutsRequest(), + service_name='service_name_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_list_service_rollouts_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.ListServiceRolloutsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceRolloutsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_service_rollouts( + service_name='service_name_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_service_rollouts_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_service_rollouts( + servicemanager.ListServiceRolloutsRequest(), + service_name='service_name_value', + filter='filter_value', + ) + + +def test_list_service_rollouts_pager(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + resources.Rollout(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[], + next_page_token='def', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('service_name', ''), + )), + ) + pager = client.list_service_rollouts(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Rollout) + for i in results) +def test_list_service_rollouts_pages(transport_name: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + resources.Rollout(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[], + next_page_token='def', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + ], + ), + RuntimeError, + ) + pages = list(client.list_service_rollouts(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_service_rollouts_async_pager(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + resources.Rollout(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[], + next_page_token='def', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_service_rollouts(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Rollout) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_service_rollouts_async_pages(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + resources.Rollout(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[], + next_page_token='def', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_service_rollouts(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + servicemanager.GetServiceRolloutRequest, + dict, +]) +def test_get_service_rollout(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Rollout( + rollout_id='rollout_id_value', + created_by='created_by_value', + status=resources.Rollout.RolloutStatus.IN_PROGRESS, + service_name='service_name_value', + ) + response = client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceRolloutRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Rollout) + assert response.rollout_id == 'rollout_id_value' + assert response.created_by == 'created_by_value' + assert response.status == resources.Rollout.RolloutStatus.IN_PROGRESS + assert response.service_name == 'service_name_value' + + +def test_get_service_rollout_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.GetServiceRolloutRequest( + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_service_rollout(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.GetServiceRolloutRequest( + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + +def test_get_service_rollout_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service_rollout in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service_rollout] = mock_rpc + request = {} + client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_rollout_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_service_rollout in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_service_rollout] = mock_rpc + + request = {} + await client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_rollout_async(transport: str = 'grpc_asyncio', request_type=servicemanager.GetServiceRolloutRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Rollout( + rollout_id='rollout_id_value', + created_by='created_by_value', + status=resources.Rollout.RolloutStatus.IN_PROGRESS, + service_name='service_name_value', + )) + response = await client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.GetServiceRolloutRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Rollout) + assert response.rollout_id == 'rollout_id_value' + assert response.created_by == 'created_by_value' + assert response.status == resources.Rollout.RolloutStatus.IN_PROGRESS + assert response.service_name == 'service_name_value' + + +@pytest.mark.asyncio +async def test_get_service_rollout_async_from_dict(): + await test_get_service_rollout_async(request_type=dict) + +def test_get_service_rollout_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceRolloutRequest() + + request.service_name = 'service_name_value' + request.rollout_id = 'rollout_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + call.return_value = resources.Rollout() + client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value&rollout_id=rollout_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_rollout_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.GetServiceRolloutRequest() + + request.service_name = 'service_name_value' + request.rollout_id = 'rollout_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Rollout()) + await client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value&rollout_id=rollout_id_value', + ) in kw['metadata'] + + +def test_get_service_rollout_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Rollout() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_service_rollout( + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].rollout_id + mock_val = 'rollout_id_value' + assert arg == mock_val + + +def test_get_service_rollout_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_service_rollout( + servicemanager.GetServiceRolloutRequest(), + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + +@pytest.mark.asyncio +async def test_get_service_rollout_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Rollout() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Rollout()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_service_rollout( + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].rollout_id + mock_val = 'rollout_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_service_rollout_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_service_rollout( + servicemanager.GetServiceRolloutRequest(), + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.CreateServiceRolloutRequest, + dict, +]) +def test_create_service_rollout(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceRolloutRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_service_rollout_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.CreateServiceRolloutRequest( + service_name='service_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_service_rollout(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.CreateServiceRolloutRequest( + service_name='service_name_value', + ) + +def test_create_service_rollout_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service_rollout in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.create_service_rollout] = mock_rpc + request = {} + client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_rollout_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_service_rollout in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_service_rollout] = mock_rpc + + request = {} + await client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_service_rollout_async(transport: str = 'grpc_asyncio', request_type=servicemanager.CreateServiceRolloutRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.CreateServiceRolloutRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_service_rollout_async_from_dict(): + await test_create_service_rollout_async(request_type=dict) + +def test_create_service_rollout_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = servicemanager.CreateServiceRolloutRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_service_rollout_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = servicemanager.CreateServiceRolloutRequest() + + request.service_name = 'service_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'service_name=service_name_value', + ) in kw['metadata'] + + +def test_create_service_rollout_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_service_rollout( + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].rollout + mock_val = resources.Rollout(rollout_id='rollout_id_value') + assert arg == mock_val + + +def test_create_service_rollout_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_service_rollout( + servicemanager.CreateServiceRolloutRequest(), + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + +@pytest.mark.asyncio +async def test_create_service_rollout_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_service_rollout( + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].service_name + mock_val = 'service_name_value' + assert arg == mock_val + arg = args[0].rollout + mock_val = resources.Rollout(rollout_id='rollout_id_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_service_rollout_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_service_rollout( + servicemanager.CreateServiceRolloutRequest(), + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.GenerateConfigReportRequest, + dict, +]) +def test_generate_config_report(request_type, transport: str = 'grpc'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.GenerateConfigReportResponse( + service_name='service_name_value', + id='id_value', + ) + response = client.generate_config_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = servicemanager.GenerateConfigReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, servicemanager.GenerateConfigReportResponse) + assert response.service_name == 'service_name_value' + assert response.id == 'id_value' + + +def test_generate_config_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = servicemanager.GenerateConfigReportRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.generate_config_report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == servicemanager.GenerateConfigReportRequest( + ) + +def test_generate_config_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_config_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_config_report] = mock_rpc + request = {} + client.generate_config_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_config_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_config_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_config_report in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_config_report] = mock_rpc + + request = {} + await client.generate_config_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_config_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_generate_config_report_async(transport: str = 'grpc_asyncio', request_type=servicemanager.GenerateConfigReportRequest): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.GenerateConfigReportResponse( + service_name='service_name_value', + id='id_value', + )) + response = await client.generate_config_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = servicemanager.GenerateConfigReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, servicemanager.GenerateConfigReportResponse) + assert response.service_name == 'service_name_value' + assert response.id == 'id_value' + + +@pytest.mark.asyncio +async def test_generate_config_report_async_from_dict(): + await test_generate_config_report_async(request_type=dict) + + +def test_generate_config_report_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.GenerateConfigReportResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_config_report( + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].new_config + mock_val = any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty') + assert arg == mock_val + arg = args[0].old_config + mock_val = any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty') + assert arg == mock_val + + +def test_generate_config_report_flattened_error(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_config_report( + servicemanager.GenerateConfigReportRequest(), + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + +@pytest.mark.asyncio +async def test_generate_config_report_flattened_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = servicemanager.GenerateConfigReportResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.GenerateConfigReportResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_config_report( + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].new_config + mock_val = any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty') + assert arg == mock_val + arg = args[0].old_config + mock_val = any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_generate_config_report_flattened_error_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_config_report( + servicemanager.GenerateConfigReportRequest(), + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + + +def test_list_services_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_services] = mock_rpc + + request = {} + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_services_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = servicemanager.ListServicesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = servicemanager.ListServicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_services(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services" % client.transport._host, args[1]) + + +def test_list_services_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_services( + servicemanager.ListServicesRequest(), + producer_project_id='producer_project_id_value', + consumer_id='consumer_id_value', + ) + + +def test_list_services_rest_pager(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + resources.ManagedService(), + ], + next_page_token='abc', + ), + servicemanager.ListServicesResponse( + services=[], + next_page_token='def', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + ], + next_page_token='ghi', + ), + servicemanager.ListServicesResponse( + services=[ + resources.ManagedService(), + resources.ManagedService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(servicemanager.ListServicesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_services(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.ManagedService) + for i in results) + + pages = list(client.list_services(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert 
page_.raw_page.next_page_token == token + + +def test_get_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service] = mock_rpc + + request = {} + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_service_rest_required_fields(request_type=servicemanager.GetServiceRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ManagedService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ManagedService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_service(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_service_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", ))) + + +def test_get_service_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.ManagedService() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.ManagedService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}" % client.transport._host, args[1]) + + +def test_get_service_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_service( + servicemanager.GetServiceRequest(), + service_name='service_name_value', + ) + + +def test_create_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_service] = mock_rpc + + request = {} + client.create_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_service_rest_required_fields(request_type=servicemanager.CreateServiceRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_service(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_service_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("service", ))) + + +def test_create_service_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + service=resources.ManagedService(service_name='service_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services" % client.transport._host, args[1]) + + +def test_create_service_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_service( + servicemanager.CreateServiceRequest(), + service=resources.ManagedService(service_name='service_name_value'), + ) + + +def test_delete_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_service] = mock_rpc + + request = {} + client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_service_rest_required_fields(request_type=servicemanager.DeleteServiceRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_service(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_service_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", ))) + + +def test_delete_service_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}" % client.transport._host, args[1]) + + +def test_delete_service_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_service( + servicemanager.DeleteServiceRequest(), + service_name='service_name_value', + ) + + +def test_undelete_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.undelete_service] = mock_rpc + + request = {} + client.undelete_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undelete_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_undelete_service_rest_required_fields(request_type=servicemanager.UndeleteServiceRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.undelete_service(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_undelete_service_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.undelete_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", ))) + + +def test_undelete_service_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.undelete_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}:undelete" % client.transport._host, args[1]) + + +def test_undelete_service_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.undelete_service( + servicemanager.UndeleteServiceRequest(), + service_name='service_name_value', + ) + + +def test_list_service_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_service_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_service_configs] = mock_rpc + + request = {} + client.list_service_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_service_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_service_configs_rest_required_fields(request_type=servicemanager.ListServiceConfigsRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_service_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_service_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = servicemanager.ListServiceConfigsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.ListServiceConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_service_configs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_service_configs_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_service_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("serviceName", ))) + + +def test_list_service_configs_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = servicemanager.ListServiceConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = servicemanager.ListServiceConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_service_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/configs" % client.transport._host, args[1]) + + +def test_list_service_configs_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_service_configs( + servicemanager.ListServiceConfigsRequest(), + service_name='service_name_value', + ) + + +def test_list_service_configs_rest_pager(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + service_pb2.Service(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[], + next_page_token='def', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceConfigsResponse( + service_configs=[ + service_pb2.Service(), + service_pb2.Service(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(servicemanager.ListServiceConfigsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'service_name': 'sample1'} + + pager = client.list_service_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service_pb2.Service) + for i in results) + + pages = list(client.list_service_configs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_service_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # 
Ensure method has been cached + assert client._transport.get_service_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service_config] = mock_rpc + + request = {} + client.get_service_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_service_config_rest_required_fields(request_type=servicemanager.GetServiceConfigRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request_init["config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + jsonified_request["configId"] = 'config_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + assert "configId" in jsonified_request + assert jsonified_request["configId"] == 'config_id_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service_pb2.Service() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_service_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_service_config_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_service_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view", )) & set(("serviceName", "configId", ))) + + +def test_get_service_config_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = service_pb2.Service() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1', 'config_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_service_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/configs/{config_id}" % client.transport._host, args[1]) + + +def test_get_service_config_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_service_config( + servicemanager.GetServiceConfigRequest(), + service_name='service_name_value', + config_id='config_id_value', + view=servicemanager.GetServiceConfigRequest.ConfigView.FULL, + ) + + +def test_create_service_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_service_config] = mock_rpc + + request = {} + client.create_service_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_service_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_service_config_rest_required_fields(request_type=servicemanager.CreateServiceConfigRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service_pb2.Service() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_service_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_service_config_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_service_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", "serviceConfig", ))) + + +def test_create_service_config_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = service_pb2.Service() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_service_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/configs" % client.transport._host, args[1]) + + +def test_create_service_config_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_service_config( + servicemanager.CreateServiceConfigRequest(), + service_name='service_name_value', + service_config=service_pb2.Service(name='name_value'), + ) + + +def test_submit_config_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.submit_config_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.submit_config_source] = mock_rpc + + request = {} + client.submit_config_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.submit_config_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_submit_config_source_rest_required_fields(request_type=servicemanager.SubmitConfigSourceRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_config_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_config_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.submit_config_source(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_submit_config_source_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.submit_config_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", "configSource", ))) + + +def test_submit_config_source_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.submit_config_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/configs:submit" % client.transport._host, args[1]) + + +def test_submit_config_source_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.submit_config_source( + servicemanager.SubmitConfigSourceRequest(), + service_name='service_name_value', + config_source=resources.ConfigSource(id='id_value'), + validate_only=True, + ) + + +def test_list_service_rollouts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_service_rollouts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_service_rollouts] = mock_rpc + + request = {} + client.list_service_rollouts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_service_rollouts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_service_rollouts_rest_required_fields(request_type=servicemanager.ListServiceRolloutsRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "filter" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_service_rollouts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "filter" in jsonified_request + assert jsonified_request["filter"] == request_init["filter"] + + jsonified_request["serviceName"] = 'service_name_value' + jsonified_request["filter"] = 'filter_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_service_rollouts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + assert "filter" in jsonified_request + assert jsonified_request["filter"] == 'filter_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = servicemanager.ListServiceRolloutsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.ListServiceRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_service_rollouts(request) + + expected_params = [ + ( + "filter", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_service_rollouts_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_service_rollouts._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("serviceName", "filter", ))) + + +def test_list_service_rollouts_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = servicemanager.ListServiceRolloutsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = servicemanager.ListServiceRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_service_rollouts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/rollouts" % client.transport._host, args[1]) + + +def test_list_service_rollouts_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_service_rollouts( + servicemanager.ListServiceRolloutsRequest(), + service_name='service_name_value', + filter='filter_value', + ) + + +def test_list_service_rollouts_rest_pager(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + resources.Rollout(), + ], + next_page_token='abc', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[], + next_page_token='def', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + ], + next_page_token='ghi', + ), + servicemanager.ListServiceRolloutsResponse( + rollouts=[ + resources.Rollout(), + resources.Rollout(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(servicemanager.ListServiceRolloutsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'service_name': 'sample1'} + + pager = client.list_service_rollouts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Rollout) + for i in results) + + pages = list(client.list_service_rollouts(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_service_rollout_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + 
assert client._transport.get_service_rollout in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service_rollout] = mock_rpc + + request = {} + client.get_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_service_rollout_rest_required_fields(request_type=servicemanager.GetServiceRolloutRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request_init["rollout_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + jsonified_request["rolloutId"] = 'rollout_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_service_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + assert "rolloutId" in jsonified_request + assert 
jsonified_request["rolloutId"] == 'rollout_id_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Rollout() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_service_rollout(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_service_rollout_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_service_rollout._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", "rolloutId", ))) + + +def test_get_service_rollout_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + 
# Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.Rollout() + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1', 'rollout_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_service_rollout(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/rollouts/{rollout_id}" % client.transport._host, args[1]) + + +def test_get_service_rollout_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_service_rollout( + servicemanager.GetServiceRolloutRequest(), + service_name='service_name_value', + rollout_id='rollout_id_value', + ) + + +def test_create_service_rollout_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_service_rollout in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_service_rollout] = mock_rpc + + request = {} + client.create_service_rollout(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_service_rollout(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_service_rollout_rest_required_fields(request_type=servicemanager.CreateServiceRolloutRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request_init["service_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["serviceName"] = 'service_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_service_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "serviceName" in jsonified_request + assert jsonified_request["serviceName"] == 'service_name_value' + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_service_rollout(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_service_rollout_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_service_rollout._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("serviceName", "rollout", ))) + + +def test_create_service_rollout_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'service_name': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_service_rollout(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services/{service_name}/rollouts" % client.transport._host, args[1]) + + +def test_create_service_rollout_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_service_rollout( + servicemanager.CreateServiceRolloutRequest(), + service_name='service_name_value', + rollout=resources.Rollout(rollout_id='rollout_id_value'), + ) + + +def test_generate_config_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_config_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_config_report] = mock_rpc + + request = {} + client.generate_config_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_config_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_config_report_rest_required_fields(request_type=servicemanager.GenerateConfigReportRequest): + transport_class = transports.ServiceManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_config_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_config_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = servicemanager.GenerateConfigReportResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.GenerateConfigReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.generate_config_report(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_generate_config_report_rest_unset_required_fields(): + transport = transports.ServiceManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.generate_config_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("newConfig", ))) + + +def test_generate_config_report_rest_flattened(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = servicemanager.GenerateConfigReportResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = servicemanager.GenerateConfigReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.generate_config_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/services:generateConfigReport" % client.transport._host, args[1]) + + +def test_generate_config_report_rest_flattened_error(transport: str = 'rest'): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_config_report( + servicemanager.GenerateConfigReportRequest(), + new_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + old_config=any_pb2.Any(type_url='type.googleapis.com/google.protobuf.Empty'), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceManagerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceManagerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServiceManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceManagerGrpcTransport, + transports.ServiceManagerGrpcAsyncIOTransport, + transports.ServiceManagerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ServiceManagerClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_services_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = servicemanager.ListServicesResponse() + client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_service_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = resources.ManagedService() + client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_service_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_service_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_service(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.DeleteServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_service_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.undelete_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.UndeleteServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_service_configs_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + call.return_value = servicemanager.ListServiceConfigsResponse() + client.list_service_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServiceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_service_config_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + call.return_value = service_pb2.Service() + client.get_service_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_service_config_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + call.return_value = service_pb2.Service() + client.create_service_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_submit_config_source_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.submit_config_source(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.SubmitConfigSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_service_rollouts_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + call.return_value = servicemanager.ListServiceRolloutsResponse() + client.list_service_rollouts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServiceRolloutsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_service_rollout_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + call.return_value = resources.Rollout() + client.get_service_rollout(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceRolloutRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_service_rollout_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_service_rollout(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRolloutRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_config_report_empty_call_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + call.return_value = servicemanager.GenerateConfigReportResponse() + client.generate_config_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GenerateConfigReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ServiceManagerAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_services_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServicesResponse( + next_page_token='next_page_token_value', + )) + await client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_service_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ManagedService( + service_name='service_name_value', + producer_project_id='producer_project_id_value', + )) + await client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_service_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_service_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.DeleteServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_undelete_service_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.undelete_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.UndeleteServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_service_configs_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_service_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceConfigsResponse( + next_page_token='next_page_token_value', + )) + await client.list_service_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServiceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_service_config_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + )) + await client.get_service_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_service_config_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service_pb2.Service( + name='name_value', + title='title_value', + producer_project_id='producer_project_id_value', + id='id_value', + )) + await client.create_service_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_submit_config_source_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.submit_config_source), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.submit_config_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.SubmitConfigSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_service_rollouts_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_service_rollouts), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.ListServiceRolloutsResponse( + next_page_token='next_page_token_value', + )) + await client.list_service_rollouts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServiceRolloutsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_service_rollout_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Rollout( + rollout_id='rollout_id_value', + created_by='created_by_value', + status=resources.Rollout.RolloutStatus.IN_PROGRESS, + service_name='service_name_value', + )) + await client.get_service_rollout(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceRolloutRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_service_rollout_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_service_rollout(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRolloutRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_generate_config_report_empty_call_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(servicemanager.GenerateConfigReportResponse( + service_name='service_name_value', + id='id_value', + )) + await client.generate_config_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GenerateConfigReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ServiceManagerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_services_rest_bad_request(request_type=servicemanager.ListServicesRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_services(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.ListServicesRequest, + dict, +]) +def test_list_services_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = servicemanager.ListServicesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.ListServicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_services(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_services_rest_interceptors(null_interceptor): + transport = transports.ServiceManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(), + ) + client = ServiceManagerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "post_list_services") as post, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_list_services") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = servicemanager.ListServicesRequest.pb(servicemanager.ListServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = servicemanager.ListServicesResponse.to_json(servicemanager.ListServicesResponse()) + req.return_value.content = return_value + + request = servicemanager.ListServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = servicemanager.ListServicesResponse() + + client.list_services(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_service_rest_bad_request(request_type=servicemanager.GetServiceRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_service(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.GetServiceRequest, + dict, +]) +def test_get_service_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.ManagedService( + service_name='service_name_value', + producer_project_id='producer_project_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.ManagedService.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_service(request) + + # Establish that the response is the type that we expect. 
# NOTE(review): generated GAPIC REST-transport tests for ServiceManager
# (gapic-generator-python; this chunk travels inside an owl-bot staging patch,
# so the '+' diff decoration has been dropped — code tokens are verbatim).
# Three recurring test shapes per RPC:
#   *_rest_bad_request   — mock the HTTP session to return 400 and expect
#                          core_exceptions.BadRequest to surface.
#   *_rest_call_success  — mock a 200 response carrying a JSON-serialized
#                          message and check the decoded client-side response.
#   *_rest_interceptors  — patch the transport's pre_/post_ interceptor hooks
#                          and verify each fires exactly once per call.

    # Tail of test_get_service_rest_call_success — the `def` line is above
    # this chunk. Verifies the decoded proto-plus response and its fields.
    assert isinstance(response, resources.ManagedService)
    assert response.service_name == 'service_name_value'
    assert response.producer_project_id == 'producer_project_id_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_service_rest_interceptors(null_interceptor):
    # Runs twice: once with no interceptor and once with a default
    # ServiceManagerRestInterceptor, so the hook plumbing works in both modes.
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_get_service") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_get_service") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.GetServiceRequest.pb(servicemanager.GetServiceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = resources.ManagedService.to_json(resources.ManagedService())
        req.return_value.content = return_value

        request = servicemanager.GetServiceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-interceptor may rewrite (request, metadata); feed those back.
        pre.return_value = request, metadata
        post.return_value = resources.ManagedService()

        client.get_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_create_service_rest_bad_request(request_type=servicemanager.CreateServiceRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.create_service(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.CreateServiceRequest,
    dict,
])
def test_create_service_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {}
    request_init["service"] = {'service_name': 'service_name_value', 'producer_project_id': 'producer_project_id_value'}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = servicemanager.CreateServiceRequest.meta.fields["service"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            # proto-plus message wrappers lack DESCRIPTOR; raw protobuf has it.
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["service"].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["service"][field])):
                    del request_init["service"][field][i][subfield]
            else:
                del request_init["service"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_service(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): for long-running-operation methods the generator only
    # serializes the value here and asserts nothing further — generated quirk.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_service_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    # _set_result_from_operation is patched so the LRO wrapper does not poll.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_create_service") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_create_service") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.CreateServiceRequest.pb(servicemanager.CreateServiceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = servicemanager.CreateServiceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.create_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_service_rest_bad_request(request_type=servicemanager.DeleteServiceRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_service(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.DeleteServiceRequest,
    dict,
])
def test_delete_service_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_service(request)

    # Establish that the response is the type that we expect.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_service_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_delete_service") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_delete_service") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.DeleteServiceRequest.pb(servicemanager.DeleteServiceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = servicemanager.DeleteServiceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.delete_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_undelete_service_rest_bad_request(request_type=servicemanager.UndeleteServiceRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.undelete_service(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.UndeleteServiceRequest,
    dict,
])
def test_undelete_service_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.undelete_service(request)

    # Establish that the response is the type that we expect.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_undelete_service_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_undelete_service") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_undelete_service") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.UndeleteServiceRequest.pb(servicemanager.UndeleteServiceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = servicemanager.UndeleteServiceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.undelete_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_service_configs_rest_bad_request(request_type=servicemanager.ListServiceConfigsRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_service_configs(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.ListServiceConfigsRequest,
    dict,
])
def test_list_service_configs_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = servicemanager.ListServiceConfigsResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = servicemanager.ListServiceConfigsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list_service_configs(request)

    # Establish that the response is the type that we expect.
    # Paged RPCs hand back a pager wrapper, not the raw response message.
    assert isinstance(response, pagers.ListServiceConfigsPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_service_configs_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_list_service_configs") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_list_service_configs") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.ListServiceConfigsRequest.pb(servicemanager.ListServiceConfigsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = servicemanager.ListServiceConfigsResponse.to_json(servicemanager.ListServiceConfigsResponse())
        req.return_value.content = return_value

        request = servicemanager.ListServiceConfigsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = servicemanager.ListServiceConfigsResponse()

        client.list_service_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_service_config_rest_bad_request(request_type=servicemanager.GetServiceConfigRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1', 'config_id': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.get_service_config(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.GetServiceConfigRequest,
    dict,
])
def test_get_service_config_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1', 'config_id': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # service_pb2.Service is a raw protobuf message, so no .pb() conversion
        # step is needed before serialization (unlike the proto-plus responses).
        return_value = service_pb2.Service(
            name='name_value',
            title='title_value',
            producer_project_id='producer_project_id_value',
            id='id_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get_service_config(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, service_pb2.Service)
    assert response.name == 'name_value'
    assert response.title == 'title_value'
    assert response.producer_project_id == 'producer_project_id_value'
    assert response.id == 'id_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_service_config_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_get_service_config") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_get_service_config") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.GetServiceConfigRequest.pb(servicemanager.GetServiceConfigRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(service_pb2.Service())
        req.return_value.content = return_value

        request = servicemanager.GetServiceConfigRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = service_pb2.Service()

        client.get_service_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_create_service_config_rest_bad_request(request_type=servicemanager.CreateServiceConfigRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.create_service_config(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.CreateServiceConfigRequest,
    dict,
])
def test_create_service_config_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    # (generated) a fully-populated google.api.Service sample covering every
    # top-level field of the service-config schema.
    request_init = {'service_name': 'sample1'}
    request_init["service_config"] = {'name': 'name_value', 'title': 'title_value', 'producer_project_id': 'producer_project_id_value', 'id': 'id_value', 'apis': [{'name': 'name_value', 'methods': [{'name': 'name_value', 'request_type_url': 'request_type_url_value', 'request_streaming': True, 'response_type_url': 'response_type_url_value', 'response_streaming': True, 'options': [{'name': 'name_value', 'value': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}}], 'syntax': 1}], 'options': {}, 'version': 'version_value', 'source_context': {'file_name': 'file_name_value'}, 'mixins': [{'name': 'name_value', 'root': 'root_value'}], 'syntax': 1}], 'types': [{'name': 'name_value', 'fields': [{'kind': 1, 'cardinality': 1, 'number': 649, 'name': 'name_value', 'type_url': 'type.googleapis.com/google.protobuf.Empty', 'oneof_index': 1166, 'packed': True, 'options': {}, 'json_name': 'json_name_value', 'default_value': 'default_value_value'}], 'oneofs': ['oneofs_value1', 'oneofs_value2'], 'options': {}, 'source_context': {}, 'syntax': 1, 'edition': 'edition_value'}], 'enums': [{'name': 'name_value', 'enumvalue': [{'name': 'name_value', 'number': 649, 'options': {}}], 'options': {}, 'source_context': {}, 'syntax': 1, 'edition': 'edition_value'}], 'documentation': {'summary': 'summary_value', 'pages': [{'name': 'name_value', 'content': 'content_value', 'subpages': {}}], 'rules': [{'selector': 'selector_value', 'description': 'description_value', 'deprecation_description': 'deprecation_description_value'}], 'documentation_root_url': 'documentation_root_url_value', 'service_root_url': 'service_root_url_value', 'overview': 'overview_value'}, 'backend': {'rules': [{'selector': 'selector_value', 'address': 'address_value', 'deadline': 0.8220000000000001, 'min_deadline': 0.1241, 'operation_deadline': 0.1894, 'path_translation': 1, 'jwt_audience': 'jwt_audience_value', 'disable_auth': True, 'protocol': 'protocol_value', 'overrides_by_request_protocol': {}}]}, 'http': {'rules': [{'selector': 'selector_value', 'get': 'get_value', 'put': 'put_value', 'post': 'post_value', 'delete': 'delete_value', 'patch': 'patch_value', 'custom': {'kind': 'kind_value', 'path': 'path_value'}, 'body': 'body_value', 'response_body': 'response_body_value', 'additional_bindings': {}}], 'fully_decode_reserved_expansion': True}, 'quota': {'limits': [{'name': 'name_value', 'description': 'description_value', 'default_limit': 1379, 'max_limit': 964, 'free_tier': 949, 'duration': 'duration_value', 'metric': 'metric_value', 'unit': 'unit_value', 'values': {}, 'display_name': 'display_name_value'}], 'metric_rules': [{'selector': 'selector_value', 'metric_costs': {}}]}, 'authentication': {'rules': [{'selector': 'selector_value', 'oauth': {'canonical_scopes': 'canonical_scopes_value'}, 'allow_without_credential': True, 'requirements': [{'provider_id': 'provider_id_value', 'audiences': 'audiences_value'}]}], 'providers': [{'id': 'id_value', 'issuer': 'issuer_value', 'jwks_uri': 'jwks_uri_value', 'audiences': 'audiences_value', 'authorization_url': 'authorization_url_value', 'jwt_locations': [{'header': 'header_value', 'query': 'query_value', 'cookie': 'cookie_value', 'value_prefix': 'value_prefix_value'}]}]}, 'context': {'rules': [{'selector': 'selector_value', 'requested': ['requested_value1', 'requested_value2'], 'provided': ['provided_value1', 'provided_value2'], 'allowed_request_extensions': ['allowed_request_extensions_value1', 'allowed_request_extensions_value2'], 'allowed_response_extensions': ['allowed_response_extensions_value1', 'allowed_response_extensions_value2']}]}, 'usage': {'requirements': ['requirements_value1', 'requirements_value2'], 'rules': [{'selector': 'selector_value', 'allow_unregistered_calls': True, 'skip_service_control': True}], 'producer_notification_channel': 'producer_notification_channel_value'}, 'endpoints': [{'name': 'name_value', 'aliases': ['aliases_value1', 'aliases_value2'], 'target': 'target_value', 'allow_cors': True}], 'control': {'environment': 'environment_value', 'method_policies': [{'selector': 'selector_value', 'request_policies': [{'selector': 'selector_value', 'resource_permission': 'resource_permission_value', 'resource_type': 'resource_type_value'}]}]}, 'logs': [{'name': 'name_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'description': 'description_value', 'display_name': 'display_name_value'}], 'metrics': [{'name': 'name_value', 'type': 'type_value', 'labels': {}, 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}, 'time_series_resource_hierarchy_level': [1]}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}], 'monitored_resources': [{'name': 'name_value', 'type': 'type_value', 'display_name': 'display_name_value', 'description': 'description_value', 'labels': {}, 'launch_stage': 6}], 'billing': {'consumer_destinations': [{'monitored_resource': 'monitored_resource_value', 'metrics': ['metrics_value1', 'metrics_value2']}]}, 'logging': {'producer_destinations': [{'monitored_resource': 'monitored_resource_value', 'logs': ['logs_value1', 'logs_value2']}], 'consumer_destinations': {}}, 'monitoring': {'producer_destinations': [{'monitored_resource': 'monitored_resource_value', 'metrics': ['metrics_value1', 'metrics_value2']}], 'consumer_destinations': {}}, 'system_parameters': {'rules': [{'selector': 'selector_value', 'parameters': [{'name': 'name_value', 'http_header': 'http_header_value', 'url_query_parameter': 'url_query_parameter_value'}]}]}, 'source_info': {'source_files': {}}, 'publishing': {'method_settings': [{'selector': 'selector_value', 'long_running': {'initial_poll_delay': {}, 'poll_delay_multiplier': 0.22510000000000002, 'max_poll_delay': {}, 'total_poll_timeout': {}}, 'auto_populated_fields': ['auto_populated_fields_value1', 'auto_populated_fields_value2']}], 'new_issue_uri': 'new_issue_uri_value', 'documentation_uri': 'documentation_uri_value', 'api_short_name': 'api_short_name_value', 'github_label': 'github_label_value', 'codeowner_github_teams': ['codeowner_github_teams_value1', 'codeowner_github_teams_value2'], 'doc_tag_prefix': 'doc_tag_prefix_value', 'organization': 1, 'library_settings': [{'version': 'version_value', 'launch_stage': 6, 'rest_numeric_enums': True, 'java_settings': {'library_package': 'library_package_value', 'service_class_names': {}, 'common': {'reference_docs_uri': 'reference_docs_uri_value', 'destinations': [10], 'selective_gapic_generation': {'methods': ['methods_value1', 'methods_value2']}}}, 'cpp_settings': {'common': {}}, 'php_settings': {'common': {}}, 'python_settings': {'common': {}, 'experimental_features': {'rest_async_io_enabled': True}}, 'node_settings': {'common': {}}, 'dotnet_settings': {'common': {}, 'renamed_services': {}, 'renamed_resources': {}, 'ignored_resources': ['ignored_resources_value1', 'ignored_resources_value2'], 'forced_namespace_aliases': ['forced_namespace_aliases_value1', 'forced_namespace_aliases_value2'], 'handwritten_signatures': ['handwritten_signatures_value1', 'handwritten_signatures_value2']}, 'ruby_settings': {'common': {}}, 'go_settings': {'common': {}}}], 'proto_reference_documentation_uri': 'proto_reference_documentation_uri_value', 'rest_reference_documentation_uri': 'rest_reference_documentation_uri_value'}, 'config_version': {'value': 541}}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = servicemanager.CreateServiceConfigRequest.meta.fields["service_config"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["service_config"].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["service_config"][field])):
                    del request_init["service_config"][field][i][subfield]
            else:
                del request_init["service_config"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = service_pb2.Service(
            name='name_value',
            title='title_value',
            producer_project_id='producer_project_id_value',
            id='id_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_service_config(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, service_pb2.Service)
    assert response.name == 'name_value'
    assert response.title == 'title_value'
    assert response.producer_project_id == 'producer_project_id_value'
    assert response.id == 'id_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_service_config_rest_interceptors(null_interceptor):
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_create_service_config") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_create_service_config") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.CreateServiceConfigRequest.pb(servicemanager.CreateServiceConfigRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(service_pb2.Service())
        req.return_value.content = return_value

        request = servicemanager.CreateServiceConfigRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = service_pb2.Service()

        client.create_service_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_submit_config_source_rest_bad_request(request_type=servicemanager.SubmitConfigSourceRequest):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.submit_config_source(request)


@pytest.mark.parametrize("request_type", [
    servicemanager.SubmitConfigSourceRequest,
    dict,
])
def test_submit_config_source_rest_call_success(request_type):
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'service_name': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.submit_config_source(request)

    # Establish that the response is the type that we expect.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_submit_config_source_rest_interceptors(null_interceptor):
    # NOTE(review): this function is truncated by the chunk boundary; the
    # remainder (pre/post return values, the call, and the assertions)
    # continues past this view.
    transport = transports.ServiceManagerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(),
        )
    client = ServiceManagerClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "post_submit_config_source") as post, \
        mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_submit_config_source") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = servicemanager.SubmitConfigSourceRequest.pb(servicemanager.SubmitConfigSourceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = servicemanager.SubmitConfigSourceRequest()
        metadata = [
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.submit_config_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_service_rollouts_rest_bad_request(request_type=servicemanager.ListServiceRolloutsRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_service_rollouts(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.ListServiceRolloutsRequest, + dict, +]) +def test_list_service_rollouts_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = servicemanager.ListServiceRolloutsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.ListServiceRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_service_rollouts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServiceRolloutsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_service_rollouts_rest_interceptors(null_interceptor): + transport = transports.ServiceManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(), + ) + client = ServiceManagerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "post_list_service_rollouts") as post, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_list_service_rollouts") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = servicemanager.ListServiceRolloutsRequest.pb(servicemanager.ListServiceRolloutsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = servicemanager.ListServiceRolloutsResponse.to_json(servicemanager.ListServiceRolloutsResponse()) + req.return_value.content = return_value + + request = 
servicemanager.ListServiceRolloutsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = servicemanager.ListServiceRolloutsResponse() + + client.list_service_rollouts(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_service_rollout_rest_bad_request(request_type=servicemanager.GetServiceRolloutRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1', 'rollout_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_service_rollout(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.GetServiceRolloutRequest, + dict, +]) +def test_get_service_rollout_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1', 'rollout_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Rollout( + rollout_id='rollout_id_value', + created_by='created_by_value', + status=resources.Rollout.RolloutStatus.IN_PROGRESS, + service_name='service_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_service_rollout(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Rollout) + assert response.rollout_id == 'rollout_id_value' + assert response.created_by == 'created_by_value' + assert response.status == resources.Rollout.RolloutStatus.IN_PROGRESS + assert response.service_name == 'service_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_service_rollout_rest_interceptors(null_interceptor): + transport = transports.ServiceManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(), + ) + client = ServiceManagerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "post_get_service_rollout") as post, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_get_service_rollout") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = servicemanager.GetServiceRolloutRequest.pb(servicemanager.GetServiceRolloutRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + 
req.return_value.status_code = 200 + return_value = resources.Rollout.to_json(resources.Rollout()) + req.return_value.content = return_value + + request = servicemanager.GetServiceRolloutRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Rollout() + + client.get_service_rollout(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_service_rollout_rest_bad_request(request_type=servicemanager.CreateServiceRolloutRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_service_rollout(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.CreateServiceRolloutRequest, + dict, +]) +def test_create_service_rollout_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'service_name': 'sample1'} + request_init["rollout"] = {'rollout_id': 'rollout_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'created_by': 'created_by_value', 'status': 1, 'traffic_percent_strategy': {'percentages': {}}, 'delete_service_strategy': {}, 'service_name': 'service_name_value'} + # The version of a 
generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = servicemanager.CreateServiceRolloutRequest.meta.fields["rollout"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["rollout"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from 
the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["rollout"][field])): + del request_init["rollout"][field][i][subfield] + else: + del request_init["rollout"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_service_rollout(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_service_rollout_rest_interceptors(null_interceptor): + transport = transports.ServiceManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(), + ) + client = ServiceManagerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "post_create_service_rollout") as post, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_create_service_rollout") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = servicemanager.CreateServiceRolloutRequest.pb(servicemanager.CreateServiceRolloutRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = servicemanager.CreateServiceRolloutRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_service_rollout(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_config_report_rest_bad_request(request_type=servicemanager.GenerateConfigReportRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.generate_config_report(request) + + +@pytest.mark.parametrize("request_type", [ + servicemanager.GenerateConfigReportRequest, + dict, +]) +def test_generate_config_report_rest_call_success(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = servicemanager.GenerateConfigReportResponse( + service_name='service_name_value', + id='id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = servicemanager.GenerateConfigReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_config_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, servicemanager.GenerateConfigReportResponse) + assert response.service_name == 'service_name_value' + assert response.id == 'id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_config_report_rest_interceptors(null_interceptor): + transport = transports.ServiceManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceManagerRestInterceptor(), + ) + client = ServiceManagerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "post_generate_config_report") as post, \ + mock.patch.object(transports.ServiceManagerRestInterceptor, "pre_generate_config_report") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = servicemanager.GenerateConfigReportRequest.pb(servicemanager.GenerateConfigReportRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = servicemanager.GenerateConfigReportResponse.to_json(servicemanager.GenerateConfigReportResponse()) + req.return_value.content = return_value + + request = servicemanager.GenerateConfigReportRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = servicemanager.GenerateConfigReportResponse() + + client.generate_config_report(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", 
+ ) + request = request_type() + request = json_format.ParseDict({'resource': 'services/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'services/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'services/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'services/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'services/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'services/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_services_empty_call_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_service_empty_call_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_service_empty_call_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service), + '__call__') as call: + client.create_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_delete_service_empty_call_rest():
    """Coverage failsafe: delete_service(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.delete_service)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.delete_service(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.DeleteServiceRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_undelete_service_empty_call_rest():
    """Coverage failsafe: undelete_service(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.undelete_service)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.undelete_service(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.UndeleteServiceRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_list_service_configs_empty_call_rest():
    """Coverage failsafe: list_service_configs(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.list_service_configs)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.list_service_configs(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.ListServiceConfigsRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_get_service_config_empty_call_rest():
    """Coverage failsafe: get_service_config(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.get_service_config)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.get_service_config(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.GetServiceConfigRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_create_service_config_empty_call_rest():
    """Coverage failsafe: create_service_config(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.create_service_config)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.create_service_config(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.CreateServiceConfigRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_submit_config_source_empty_call_rest():
    """Coverage failsafe: submit_config_source(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.submit_config_source)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.submit_config_source(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.SubmitConfigSourceRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_list_service_rollouts_empty_call_rest():
    """Coverage failsafe: list_service_rollouts(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.list_service_rollouts)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.list_service_rollouts(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.ListServiceRolloutsRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_get_service_rollout_empty_call_rest():
    """Coverage failsafe: get_service_rollout(request=None) still reaches the stub."""
    client = ServiceManagerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport-level callable and issue a fully empty call.
    stub_type = type(client.transport.get_service_rollout)
    with mock.patch.object(stub_type, '__call__') as stub:
        client.get_service_rollout(request=None)

    # The stub must be invoked with a default-constructed request message.
    stub.assert_called()
    _, args, _ = stub.mock_calls[0]
    assert args[0] == servicemanager.GetServiceRolloutRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
+def test_create_service_rollout_empty_call_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_service_rollout), + '__call__') as call: + client.create_service_rollout(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.CreateServiceRolloutRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_config_report_empty_call_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_config_report), + '__call__') as call: + client.generate_config_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = servicemanager.GenerateConfigReportRequest() + + assert args[0] == request_msg + + +def test_service_manager_rest_lro_client(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceManagerGrpcTransport, + ) + +def test_service_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_manager_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.servicemanagement_v1.services.service_manager.transports.ServiceManagerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_services', + 'get_service', + 'create_service', + 'delete_service', + 'undelete_service', + 'list_service_configs', + 'get_service_config', + 'create_service_config', + 'submit_config_source', + 'list_service_rollouts', + 'get_service_rollout', + 'create_service_rollout', + 'generate_config_report', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_manager_base_transport_with_credentials_file(): + # Instantiate the base 
transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.servicemanagement_v1.services.service_manager.transports.ServiceManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + 'https://www.googleapis.com/auth/service.management.readonly', +), + quota_project_id="octopus", + ) + + +def test_service_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.servicemanagement_v1.services.service_manager.transports.ServiceManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceManagerTransport() + adc.assert_called_once() + + +def test_service_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + 'https://www.googleapis.com/auth/service.management.readonly', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceManagerGrpcTransport, + transports.ServiceManagerGrpcAsyncIOTransport, + ], +) +def test_service_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/service.management', 'https://www.googleapis.com/auth/service.management.readonly',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceManagerGrpcTransport, + transports.ServiceManagerGrpcAsyncIOTransport, + transports.ServiceManagerRestTransport, + ], +) +def test_service_manager_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = 
mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceManagerGrpcTransport, grpc_helpers), + (transports.ServiceManagerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_service_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "servicemanagement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + 'https://www.googleapis.com/auth/service.management.readonly', +), + scopes=["1", "2"], + default_host="servicemanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ServiceManagerGrpcTransport, transports.ServiceManagerGrpcAsyncIOTransport]) +def test_service_manager_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_service_manager_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ServiceManagerRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_manager_host_no_port(transport_name): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicemanagement.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicemanagement.googleapis.com:443' + if transport_name in ['grpc', 
'grpc_asyncio'] + else 'https://servicemanagement.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_manager_host_with_port(transport_name): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='servicemanagement.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'servicemanagement.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://servicemanagement.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_manager_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceManagerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceManagerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_services._session + session2 = client2.transport.list_services._session + assert session1 != session2 + session1 = client1.transport.get_service._session + session2 = client2.transport.get_service._session + assert session1 != session2 + session1 = client1.transport.create_service._session + session2 = client2.transport.create_service._session + assert session1 != session2 + session1 = client1.transport.delete_service._session + session2 = client2.transport.delete_service._session + assert session1 != session2 + session1 = client1.transport.undelete_service._session + session2 = client2.transport.undelete_service._session + assert session1 != session2 + session1 = client1.transport.list_service_configs._session + session2 = client2.transport.list_service_configs._session + assert session1 != session2 + session1 = client1.transport.get_service_config._session + session2 = 
client2.transport.get_service_config._session + assert session1 != session2 + session1 = client1.transport.create_service_config._session + session2 = client2.transport.create_service_config._session + assert session1 != session2 + session1 = client1.transport.submit_config_source._session + session2 = client2.transport.submit_config_source._session + assert session1 != session2 + session1 = client1.transport.list_service_rollouts._session + session2 = client2.transport.list_service_rollouts._session + assert session1 != session2 + session1 = client1.transport.get_service_rollout._session + session2 = client2.transport.get_service_rollout._session + assert session1 != session2 + session1 = client1.transport.create_service_rollout._session + session2 = client2.transport.create_service_rollout._session + assert session1 != session2 + session1 = client1.transport.generate_config_report._session + session2 = client2.transport.generate_config_report._session + assert session1 != session2 +def test_service_manager_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ServiceManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ServiceManagerGrpcTransport, transports.ServiceManagerGrpcAsyncIOTransport]) +def test_service_manager_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ServiceManagerGrpcTransport, transports.ServiceManagerGrpcAsyncIOTransport]) +def test_service_manager_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_service_manager_grpc_lro_client(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_service_manager_grpc_lro_async_client(): + client = ServiceManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ServiceManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceManagerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ServiceManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceManagerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ServiceManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceManagerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ServiceManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ServiceManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceManagerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ServiceManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ServiceManagerTransport, '_prep_wrapped_messages') as prep: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ServiceManagerTransport, '_prep_wrapped_messages') as prep: + transport_class = ServiceManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_list_operations(transport: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + +def test_get_iam_policy(transport: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +def test_test_iam_permissions(transport: str = "grpc"): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ServiceManagerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = ServiceManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ServiceManagerClient, transports.ServiceManagerGrpcTransport), + (ServiceManagerAsyncClient, transports.ServiceManagerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/.coveragerc b/owl-bot-staging/google-cloud-service-usage/v1/.coveragerc new file mode 100644 index 000000000000..9a54970da734 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/service_usage/__init__.py + google/cloud/service_usage/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-service-usage/v1/.flake8 b/owl-bot-staging/google-cloud-service-usage/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/.flake8 @@ -0,0 +1,33 @@ +# 
-*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-service-usage/v1/MANIFEST.in b/owl-bot-staging/google-cloud-service-usage/v1/MANIFEST.in new file mode 100644 index 000000000000..b3777f7f7cae --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/service_usage *.py +recursive-include google/cloud/service_usage_v1 *.py diff --git a/owl-bot-staging/google-cloud-service-usage/v1/README.rst b/owl-bot-staging/google-cloud-service-usage/v1/README.rst new file mode 100644 index 000000000000..e4ac381b5846 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Service Usage API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Service Usage API. +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-service-usage/v1/docs/_static/custom.css
new file mode 100644
index 000000000000..06423be0b592
--- /dev/null
+++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/_static/custom.css
@@ -0,0 +1,3 @@
+dl.field-list > dt {
+    min-width: 100px
+}
diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/conf.py b/owl-bot-staging/google-cloud-service-usage/v1/docs/conf.py
new file mode 100644
index 000000000000..f41fd8db1369
--- /dev/null
+++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-service-usage documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-service-usage"
+copyright = u"2023, Google, LLC"
+author = u"Google APIs" # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for Python",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-service-usage-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-service-usage.tex", + u"google-cloud-service-usage Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-service-usage", + u"Google Cloud Service Usage Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-service-usage", + u"google-cloud-service-usage Documentation", + author, + "google-cloud-service-usage", + "GAPIC library for Google Cloud Service Usage API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/index.rst b/owl-bot-staging/google-cloud-service-usage/v1/docs/index.rst new file mode 100644 index 000000000000..8ad5ee6ea11f --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + service_usage_v1/services_ + service_usage_v1/types_ diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/service_usage.rst b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/service_usage.rst new file mode 100644 index 000000000000..1be4cb087cf1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/service_usage.rst @@ -0,0 +1,10 @@ +ServiceUsage +------------------------------ + +.. automodule:: google.cloud.service_usage_v1.services.service_usage + :members: + :inherited-members: + +.. 
automodule:: google.cloud.service_usage_v1.services.service_usage.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/services_.rst b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/services_.rst new file mode 100644 index 000000000000..98a4c6d67a46 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Service Usage v1 API +============================================== +.. toctree:: + :maxdepth: 2 + + service_usage diff --git a/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/types_.rst b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/types_.rst new file mode 100644 index 000000000000..fb0ef9128ea4 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/docs/service_usage_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Service Usage v1 API +=========================================== + +.. automodule:: google.cloud.service_usage_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/__init__.py new file mode 100644 index 000000000000..47804e8aac4c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.service_usage import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.service_usage_v1.services.service_usage.client import ServiceUsageClient +from google.cloud.service_usage_v1.services.service_usage.async_client import ServiceUsageAsyncClient + +from google.cloud.service_usage_v1.types.resources import OperationMetadata +from google.cloud.service_usage_v1.types.resources import Service +from google.cloud.service_usage_v1.types.resources import ServiceConfig +from google.cloud.service_usage_v1.types.resources import State +from google.cloud.service_usage_v1.types.serviceusage import BatchEnableServicesRequest +from google.cloud.service_usage_v1.types.serviceusage import BatchEnableServicesResponse +from google.cloud.service_usage_v1.types.serviceusage import BatchGetServicesRequest +from google.cloud.service_usage_v1.types.serviceusage import BatchGetServicesResponse +from google.cloud.service_usage_v1.types.serviceusage import DisableServiceRequest +from google.cloud.service_usage_v1.types.serviceusage import DisableServiceResponse +from google.cloud.service_usage_v1.types.serviceusage import EnableServiceRequest +from google.cloud.service_usage_v1.types.serviceusage import EnableServiceResponse +from google.cloud.service_usage_v1.types.serviceusage import GetServiceRequest +from google.cloud.service_usage_v1.types.serviceusage import ListServicesRequest +from google.cloud.service_usage_v1.types.serviceusage import ListServicesResponse + +__all__ = ('ServiceUsageClient', + 'ServiceUsageAsyncClient', + 'OperationMetadata', + 'Service', + 'ServiceConfig', + 'State', + 'BatchEnableServicesRequest', + 'BatchEnableServicesResponse', + 'BatchGetServicesRequest', + 'BatchGetServicesResponse', + 'DisableServiceRequest', + 'DisableServiceResponse', + 'EnableServiceRequest', + 
'EnableServiceResponse', + 'GetServiceRequest', + 'ListServicesRequest', + 'ListServicesResponse', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/gapic_version.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/py.typed b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/py.typed new file mode 100644 index 000000000000..89902483a6cf --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-usage package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/__init__.py new file mode 100644 index 000000000000..9780a63dafb2 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/__init__.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.service_usage_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.service_usage import ServiceUsageClient +from .services.service_usage import ServiceUsageAsyncClient + +from .types.resources import OperationMetadata +from .types.resources import Service +from .types.resources import ServiceConfig +from .types.resources import State +from .types.serviceusage import BatchEnableServicesRequest +from .types.serviceusage import BatchEnableServicesResponse +from .types.serviceusage import BatchGetServicesRequest +from .types.serviceusage import BatchGetServicesResponse +from .types.serviceusage import DisableServiceRequest +from .types.serviceusage import DisableServiceResponse +from .types.serviceusage import EnableServiceRequest +from .types.serviceusage import EnableServiceResponse +from .types.serviceusage import GetServiceRequest +from .types.serviceusage import ListServicesRequest +from .types.serviceusage import 
ListServicesResponse + +__all__ = ( + 'ServiceUsageAsyncClient', +'BatchEnableServicesRequest', +'BatchEnableServicesResponse', +'BatchGetServicesRequest', +'BatchGetServicesResponse', +'DisableServiceRequest', +'DisableServiceResponse', +'EnableServiceRequest', +'EnableServiceResponse', +'GetServiceRequest', +'ListServicesRequest', +'ListServicesResponse', +'OperationMetadata', +'Service', +'ServiceConfig', +'ServiceUsageClient', +'State', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_metadata.json new file mode 100644 index 000000000000..587906d021ff --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.service_usage_v1", + "protoPackage": "google.api.serviceusage.v1", + "schema": "1.0", + "services": { + "ServiceUsage": { + "clients": { + "grpc": { + "libraryClient": "ServiceUsageClient", + "rpcs": { + "BatchEnableServices": { + "methods": [ + "batch_enable_services" + ] + }, + "BatchGetServices": { + "methods": [ + "batch_get_services" + ] + }, + "DisableService": { + "methods": [ + "disable_service" + ] + }, + "EnableService": { + "methods": [ + "enable_service" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceUsageAsyncClient", + "rpcs": { + "BatchEnableServices": { + "methods": [ + "batch_enable_services" + ] + }, + "BatchGetServices": { + "methods": [ + "batch_get_services" + ] + }, + "DisableService": { + "methods": [ + "disable_service" + ] + }, + "EnableService": { + "methods": [ + "enable_service" + ] + }, + "GetService": { + "methods": [ + 
"get_service" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + } + } + }, + "rest": { + "libraryClient": "ServiceUsageClient", + "rpcs": { + "BatchEnableServices": { + "methods": [ + "batch_enable_services" + ] + }, + "BatchGetServices": { + "methods": [ + "batch_get_services" + ] + }, + "DisableService": { + "methods": [ + "disable_service" + ] + }, + "EnableService": { + "methods": [ + "enable_service" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_version.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/py.typed b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/py.typed new file mode 100644 index 000000000000..89902483a6cf --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-service-usage package uses inline types. diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/__init__.py new file mode 100644 index 000000000000..92d74f66214f --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ServiceUsageClient +from .async_client import ServiceUsageAsyncClient + +__all__ = ( + 'ServiceUsageClient', + 'ServiceUsageAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/async_client.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/async_client.py new file mode 100644 index 000000000000..8e6fb3032dc1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/async_client.py @@ -0,0 +1,920 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.service_usage_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.service_usage_v1.services.service_usage import pagers +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import ServiceUsageTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ServiceUsageGrpcAsyncIOTransport +from .client import ServiceUsageClient + + +class ServiceUsageAsyncClient: + """Enables services that service consumers want to use on Google Cloud + Platform, lists the available or enabled services, or disables + services that service consumers no longer use. 
+ + See `Service Usage + API `__ + """ + + _client: ServiceUsageClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = ServiceUsageClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceUsageClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ServiceUsageClient._DEFAULT_UNIVERSE + + service_path = staticmethod(ServiceUsageClient.service_path) + parse_service_path = staticmethod(ServiceUsageClient.parse_service_path) + common_billing_account_path = staticmethod(ServiceUsageClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ServiceUsageClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ServiceUsageClient.common_folder_path) + parse_common_folder_path = staticmethod(ServiceUsageClient.parse_common_folder_path) + common_organization_path = staticmethod(ServiceUsageClient.common_organization_path) + parse_common_organization_path = staticmethod(ServiceUsageClient.parse_common_organization_path) + common_project_path = staticmethod(ServiceUsageClient.common_project_path) + parse_common_project_path = staticmethod(ServiceUsageClient.parse_common_project_path) + common_location_path = staticmethod(ServiceUsageClient.common_location_path) + parse_common_location_path = staticmethod(ServiceUsageClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceUsageAsyncClient: The constructed client. 
+ """ + return ServiceUsageClient.from_service_account_info.__func__(ServiceUsageAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceUsageAsyncClient: The constructed client. + """ + return ServiceUsageClient.from_service_account_file.__func__(ServiceUsageAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ServiceUsageClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServiceUsageTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceUsageTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ServiceUsageClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceUsageTransport, Callable[..., ServiceUsageTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service usage async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceUsageTransport,Callable[..., ServiceUsageTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceUsageTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServiceUsageClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def enable_service(self, + request: Optional[Union[serviceusage.EnableServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Enable a service so that it can be used with a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_enable_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.EnableServiceRequest( + ) + + # Make the request + operation = client.enable_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.EnableServiceRequest, dict]]): + The request object. Request message for the ``EnableService`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.EnableServiceResponse` Response message for the EnableService method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.EnableServiceRequest): + request = serviceusage.EnableServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.enable_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + serviceusage.EnableServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def disable_service(self, + request: Optional[Union[serviceusage.DisableServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Disable a service so that it can no longer be used with a + project. This prevents unintended usage that may cause + unexpected billing charges or security leaks. 
+ + It is not valid to call the disable method on a service that is + not currently enabled. Callers will receive a + ``FAILED_PRECONDITION`` status if the target service is not + currently enabled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_disable_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.DisableServiceRequest( + ) + + # Make the request + operation = client.disable_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.DisableServiceRequest, dict]]): + The request object. Request message for the ``DisableService`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.DisableServiceResponse` Response message for the DisableService method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.DisableServiceRequest): + request = serviceusage.DisableServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.disable_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + serviceusage.DisableServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_service(self, + request: Optional[Union[serviceusage.GetServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Service: + r"""Returns the service configuration and enabled state + for a given service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_get_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.GetServiceRequest( + ) + + # Make the request + response = await client.get_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.GetServiceRequest, dict]]): + The request object. Request message for the ``GetService`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.types.Service: + A service that is available for use + by the consumer. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.GetServiceRequest): + request = serviceusage.GetServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_services(self, + request: Optional[Union[serviceusage.ListServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesAsyncPager: + r"""List all services available to the specified project, and the + current state of those services with respect to the project. The + list includes all public services, all services for which the + calling user has the ``servicemanagement.services.bind`` + permission, and all services that have already been enabled on + the project. The list can be filtered to only include services + in a specific state, for example to only include services + enabled on the project. + + WARNING: If you need to query enabled services frequently or + across an organization, you should use `Cloud Asset Inventory + API `__, + which provides higher throughput and richer filtering + capability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_list_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.ListServicesRequest, dict]]): + The request object. Request message for the ``ListServices`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.services.service_usage.pagers.ListServicesAsyncPager: + Response message for the ListServices method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.ListServicesRequest): + request = serviceusage.ListServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_services] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServicesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_enable_services(self, + request: Optional[Union[serviceusage.BatchEnableServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Enable multiple services on a project. The operation is atomic: + if enabling any service fails, then the entire batch fails, and + no state changes occur. To enable a single service, use the + ``EnableService`` method instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_batch_enable_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchEnableServicesRequest( + ) + + # Make the request + operation = client.batch_enable_services(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.BatchEnableServicesRequest, dict]]): + The request object. Request message for the ``BatchEnableServices`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.BatchEnableServicesResponse` Response message for the BatchEnableServices method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.BatchEnableServicesRequest): + request = serviceusage.BatchEnableServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.batch_enable_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + serviceusage.BatchEnableServicesResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + async def batch_get_services(self, + request: Optional[Union[serviceusage.BatchGetServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> serviceusage.BatchGetServicesResponse: + r"""Returns the service configurations and enabled states + for a given list of services. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + async def sample_batch_get_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchGetServicesRequest( + ) + + # Make the request + response = await client.batch_get_services(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.service_usage_v1.types.BatchGetServicesRequest, dict]]): + The request object. Request message for the ``BatchGetServices`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.types.BatchGetServicesResponse: + Response message for the BatchGetServices method. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.BatchGetServicesRequest): + request = serviceusage.BatchGetServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ServiceUsageAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceUsageAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/client.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/client.py new file mode 100644 index 000000000000..ed98976f584a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/client.py @@ -0,0 +1,1252 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.service_usage_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.service_usage_v1.services.service_usage import pagers +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import ServiceUsageTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ServiceUsageGrpcTransport +from .transports.grpc_asyncio import ServiceUsageGrpcAsyncIOTransport +from .transports.rest import ServiceUsageRestTransport + + +class ServiceUsageClientMeta(type): + """Metaclass for the ServiceUsage client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceUsageTransport]] + _transport_registry["grpc"] = ServiceUsageGrpcTransport + _transport_registry["grpc_asyncio"] = ServiceUsageGrpcAsyncIOTransport + _transport_registry["rest"] = ServiceUsageRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ServiceUsageTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceUsageClient(metaclass=ServiceUsageClientMeta): + """Enables services that service consumers want to use on Google Cloud + Platform, lists the available or enabled services, or disables + services that service consumers no longer use. + + See `Service Usage + API `__ + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "serviceusage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "serviceusage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceUsageClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceUsageClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceUsageTransport: + """Returns the transport used by the client instance. 
+ + Returns: + ServiceUsageTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def service_path(project: str,service: str,) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/services/{service}".format(project=project, service=service, ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str,str]: + """Parses a service path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/services/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ServiceUsageClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ServiceUsageClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ServiceUsageClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceUsageTransport, Callable[..., ServiceUsageTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service usage client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ServiceUsageTransport,Callable[..., ServiceUsageTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceUsageTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ServiceUsageClient._read_environment_variables() + self._client_cert_source = ServiceUsageClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = ServiceUsageClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ServiceUsageTransport) + if transport_provided: + # transport is a ServiceUsageTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ServiceUsageTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ServiceUsageClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ServiceUsageTransport], Callable[..., ServiceUsageTransport]] = ( + ServiceUsageClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ServiceUsageTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def enable_service(self, + request: Optional[Union[serviceusage.EnableServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Enable a service so that it can be used with a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_enable_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.EnableServiceRequest( + ) + + # Make the request + operation = client.enable_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.EnableServiceRequest, dict]): + The request object. Request message for the ``EnableService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.EnableServiceResponse` Response message for the EnableService method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.EnableServiceRequest): + request = serviceusage.EnableServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_service] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + serviceusage.EnableServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def disable_service(self, + request: Optional[Union[serviceusage.DisableServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Disable a service so that it can no longer be used with a + project. This prevents unintended usage that may cause + unexpected billing charges or security leaks. + + It is not valid to call the disable method on a service that is + not currently enabled. Callers will receive a + ``FAILED_PRECONDITION`` status if the target service is not + currently enabled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_disable_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.DisableServiceRequest( + ) + + # Make the request + operation = client.disable_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.DisableServiceRequest, dict]): + The request object. Request message for the ``DisableService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.DisableServiceResponse` Response message for the DisableService method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.DisableServiceRequest): + request = serviceusage.DisableServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_service] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + serviceusage.DisableServiceResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_service(self, + request: Optional[Union[serviceusage.GetServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Service: + r"""Returns the service configuration and enabled state + for a given service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_get_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.GetServiceRequest( + ) + + # Make the request + response = client.get_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.GetServiceRequest, dict]): + The request object. Request message for the ``GetService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.types.Service: + A service that is available for use + by the consumer. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.GetServiceRequest): + request = serviceusage.GetServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_services(self, + request: Optional[Union[serviceusage.ListServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesPager: + r"""List all services available to the specified project, and the + current state of those services with respect to the project. The + list includes all public services, all services for which the + calling user has the ``servicemanagement.services.bind`` + permission, and all services that have already been enabled on + the project. The list can be filtered to only include services + in a specific state, for example to only include services + enabled on the project. 
+ + WARNING: If you need to query enabled services frequently or + across an organization, you should use `Cloud Asset Inventory + API `__, + which provides higher throughput and richer filtering + capability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_list_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.ListServicesRequest, dict]): + The request object. Request message for the ``ListServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.services.service_usage.pagers.ListServicesPager: + Response message for the ListServices method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, serviceusage.ListServicesRequest): + request = serviceusage.ListServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServicesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_enable_services(self, + request: Optional[Union[serviceusage.BatchEnableServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Enable multiple services on a project. The operation is atomic: + if enabling any service fails, then the entire batch fails, and + no state changes occur. To enable a single service, use the + ``EnableService`` method instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_batch_enable_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchEnableServicesRequest( + ) + + # Make the request + operation = client.batch_enable_services(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.BatchEnableServicesRequest, dict]): + The request object. Request message for the ``BatchEnableServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.service_usage_v1.types.BatchEnableServicesResponse` Response message for the BatchEnableServices method. + This response message is assigned to the response + field of the returned Operation when that operation + is done. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.BatchEnableServicesRequest): + request = serviceusage.BatchEnableServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.batch_enable_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + serviceusage.BatchEnableServicesResponse, + metadata_type=resources.OperationMetadata, + ) + + # Done; return the response. + return response + + def batch_get_services(self, + request: Optional[Union[serviceusage.BatchGetServicesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> serviceusage.BatchGetServicesResponse: + r"""Returns the service configurations and enabled states + for a given list of services. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import service_usage_v1 + + def sample_batch_get_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchGetServicesRequest( + ) + + # Make the request + response = client.batch_get_services(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.service_usage_v1.types.BatchGetServicesRequest, dict]): + The request object. Request message for the ``BatchGetServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.service_usage_v1.types.BatchGetServicesResponse: + Response message for the BatchGetServices method. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, serviceusage.BatchGetServicesRequest): + request = serviceusage.BatchGetServicesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_get_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ServiceUsageClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceUsageClient", +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/pagers.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/pagers.py new file mode 100644 index 000000000000..9003376528db --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/pagers.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage + + +class ListServicesPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.service_usage_v1.types.ListServicesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.service_usage_v1.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., serviceusage.ListServicesResponse], + request: serviceusage.ListServicesRequest, + response: serviceusage.ListServicesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.service_usage_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.service_usage_v1.types.ListServicesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = serviceusage.ListServicesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[serviceusage.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Service]: + for page in self.pages: + yield from page.services + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListServicesAsyncPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.service_usage_v1.types.ListServicesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.service_usage_v1.types.ListServicesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[serviceusage.ListServicesResponse]], + request: serviceusage.ListServicesRequest, + response: serviceusage.ListServicesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.service_usage_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.service_usage_v1.types.ListServicesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = serviceusage.ListServicesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[serviceusage.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Service]: + async def async_generator(): + async for page in self.pages: + for response in page.services: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/README.rst b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/README.rst new file mode 100644 index 000000000000..d4409c843d5a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ServiceUsageTransport` is the ABC for all transports. +- public child `ServiceUsageGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ServiceUsageGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseServiceUsageRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `ServiceUsageRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/__init__.py new file mode 100644 index 000000000000..d88e07f7828e --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceUsageTransport +from .grpc import ServiceUsageGrpcTransport +from .grpc_asyncio import ServiceUsageGrpcAsyncIOTransport +from .rest import ServiceUsageRestTransport +from .rest import ServiceUsageRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceUsageTransport]] +_transport_registry['grpc'] = ServiceUsageGrpcTransport +_transport_registry['grpc_asyncio'] = ServiceUsageGrpcAsyncIOTransport +_transport_registry['rest'] = ServiceUsageRestTransport + +__all__ = ( + 'ServiceUsageTransport', + 'ServiceUsageGrpcTransport', + 'ServiceUsageGrpcAsyncIOTransport', + 'ServiceUsageRestTransport', + 'ServiceUsageRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/base.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/base.py new file mode 100644 index 000000000000..3061ab036e47 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/base.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.service_usage_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ServiceUsageTransport(abc.ABC): + """Abstract transport class for ServiceUsage.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', + ) + + DEFAULT_HOST: str = 'serviceusage.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'serviceusage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.enable_service: gapic_v1.method.wrap_method( + self.enable_service, + default_timeout=None, + client_info=client_info, + ), + self.disable_service: gapic_v1.method.wrap_method( + self.disable_service, + default_timeout=None, + client_info=client_info, + ), + self.get_service: gapic_v1.method.wrap_method( + self.get_service, + default_timeout=None, + client_info=client_info, + ), + self.list_services: gapic_v1.method.wrap_method( + self.list_services, + default_timeout=None, + client_info=client_info, + ), + self.batch_enable_services: gapic_v1.method.wrap_method( + self.batch_enable_services, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_services: gapic_v1.method.wrap_method( + self.batch_get_services, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def enable_service(self) -> Callable[ + [serviceusage.EnableServiceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def disable_service(self) -> Callable[ + [serviceusage.DisableServiceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_service(self) -> Callable[ + [serviceusage.GetServiceRequest], + Union[ + resources.Service, + Awaitable[resources.Service] + ]]: + raise NotImplementedError() + + @property + def list_services(self) -> Callable[ + [serviceusage.ListServicesRequest], + Union[ + serviceusage.ListServicesResponse, + Awaitable[serviceusage.ListServicesResponse] + ]]: + raise NotImplementedError() + + @property + def batch_enable_services(self) -> Callable[ + [serviceusage.BatchEnableServicesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def batch_get_services(self) -> Callable[ + [serviceusage.BatchGetServicesRequest], + Union[ + serviceusage.BatchGetServicesResponse, + Awaitable[serviceusage.BatchGetServicesResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 
'ServiceUsageTransport', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc.py new file mode 100644 index 000000000000..d03f89794ba1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc.py @@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore +from .base import ServiceUsageTransport, DEFAULT_CLIENT_INFO + + +class ServiceUsageGrpcTransport(ServiceUsageTransport): + """gRPC backend transport for ServiceUsage. 
+ + Enables services that service consumers want to use on Google Cloud + Platform, lists the available or enabled services, or disables + services that service consumers no longer use. + + See `Service Usage + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'serviceusage.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'serviceusage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'serviceusage.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def enable_service(self) -> Callable[ + [serviceusage.EnableServiceRequest], + operations_pb2.Operation]: + r"""Return a callable for the enable service method over gRPC. + + Enable a service so that it can be used with a + project. + + Returns: + Callable[[~.EnableServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'enable_service' not in self._stubs: + self._stubs['enable_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/EnableService', + request_serializer=serviceusage.EnableServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['enable_service'] + + @property + def disable_service(self) -> Callable[ + [serviceusage.DisableServiceRequest], + operations_pb2.Operation]: + r"""Return a callable for the disable service method over gRPC. + + Disable a service so that it can no longer be used with a + project. This prevents unintended usage that may cause + unexpected billing charges or security leaks. + + It is not valid to call the disable method on a service that is + not currently enabled. Callers will receive a + ``FAILED_PRECONDITION`` status if the target service is not + currently enabled. + + Returns: + Callable[[~.DisableServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'disable_service' not in self._stubs: + self._stubs['disable_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/DisableService', + request_serializer=serviceusage.DisableServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['disable_service'] + + @property + def get_service(self) -> Callable[ + [serviceusage.GetServiceRequest], + resources.Service]: + r"""Return a callable for the get service method over gRPC. + + Returns the service configuration and enabled state + for a given service. + + Returns: + Callable[[~.GetServiceRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/GetService', + request_serializer=serviceusage.GetServiceRequest.serialize, + response_deserializer=resources.Service.deserialize, + ) + return self._stubs['get_service'] + + @property + def list_services(self) -> Callable[ + [serviceusage.ListServicesRequest], + serviceusage.ListServicesResponse]: + r"""Return a callable for the list services method over gRPC. + + List all services available to the specified project, and the + current state of those services with respect to the project. The + list includes all public services, all services for which the + calling user has the ``servicemanagement.services.bind`` + permission, and all services that have already been enabled on + the project. The list can be filtered to only include services + in a specific state, for example to only include services + enabled on the project. 
+ + WARNING: If you need to query enabled services frequently or + across an organization, you should use `Cloud Asset Inventory + API `__, + which provides higher throughput and richer filtering + capability. + + Returns: + Callable[[~.ListServicesRequest], + ~.ListServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/ListServices', + request_serializer=serviceusage.ListServicesRequest.serialize, + response_deserializer=serviceusage.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def batch_enable_services(self) -> Callable[ + [serviceusage.BatchEnableServicesRequest], + operations_pb2.Operation]: + r"""Return a callable for the batch enable services method over gRPC. + + Enable multiple services on a project. The operation is atomic: + if enabling any service fails, then the entire batch fails, and + no state changes occur. To enable a single service, use the + ``EnableService`` method instead. + + Returns: + Callable[[~.BatchEnableServicesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_enable_services' not in self._stubs: + self._stubs['batch_enable_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/BatchEnableServices', + request_serializer=serviceusage.BatchEnableServicesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['batch_enable_services'] + + @property + def batch_get_services(self) -> Callable[ + [serviceusage.BatchGetServicesRequest], + serviceusage.BatchGetServicesResponse]: + r"""Return a callable for the batch get services method over gRPC. + + Returns the service configurations and enabled states + for a given list of services. + + Returns: + Callable[[~.BatchGetServicesRequest], + ~.BatchGetServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_services' not in self._stubs: + self._stubs['batch_get_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/BatchGetServices', + request_serializer=serviceusage.BatchGetServicesRequest.serialize, + response_deserializer=serviceusage.BatchGetServicesResponse.deserialize, + ) + return self._stubs['batch_get_services'] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ServiceUsageGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a19a0b01b33c --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/grpc_asyncio.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore +from .base import ServiceUsageTransport, DEFAULT_CLIENT_INFO +from .grpc import ServiceUsageGrpcTransport + + +class ServiceUsageGrpcAsyncIOTransport(ServiceUsageTransport): + """gRPC AsyncIO backend transport for ServiceUsage. + + Enables services that service consumers want to use on Google Cloud + Platform, lists the available or enabled services, or disables + services that service consumers no longer use. + + See `Service Usage + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'serviceusage.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'serviceusage.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'serviceusage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def enable_service(self) -> Callable[ + [serviceusage.EnableServiceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the enable service method over gRPC. + + Enable a service so that it can be used with a + project. + + Returns: + Callable[[~.EnableServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'enable_service' not in self._stubs: + self._stubs['enable_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/EnableService', + request_serializer=serviceusage.EnableServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['enable_service'] + + @property + def disable_service(self) -> Callable[ + [serviceusage.DisableServiceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the disable service method over gRPC. + + Disable a service so that it can no longer be used with a + project. This prevents unintended usage that may cause + unexpected billing charges or security leaks. + + It is not valid to call the disable method on a service that is + not currently enabled. Callers will receive a + ``FAILED_PRECONDITION`` status if the target service is not + currently enabled. + + Returns: + Callable[[~.DisableServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'disable_service' not in self._stubs: + self._stubs['disable_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/DisableService', + request_serializer=serviceusage.DisableServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['disable_service'] + + @property + def get_service(self) -> Callable[ + [serviceusage.GetServiceRequest], + Awaitable[resources.Service]]: + r"""Return a callable for the get service method over gRPC. + + Returns the service configuration and enabled state + for a given service. 
+ + Returns: + Callable[[~.GetServiceRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_service' not in self._stubs: + self._stubs['get_service'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/GetService', + request_serializer=serviceusage.GetServiceRequest.serialize, + response_deserializer=resources.Service.deserialize, + ) + return self._stubs['get_service'] + + @property + def list_services(self) -> Callable[ + [serviceusage.ListServicesRequest], + Awaitable[serviceusage.ListServicesResponse]]: + r"""Return a callable for the list services method over gRPC. + + List all services available to the specified project, and the + current state of those services with respect to the project. The + list includes all public services, all services for which the + calling user has the ``servicemanagement.services.bind`` + permission, and all services that have already been enabled on + the project. The list can be filtered to only include services + in a specific state, for example to only include services + enabled on the project. + + WARNING: If you need to query enabled services frequently or + across an organization, you should use `Cloud Asset Inventory + API `__, + which provides higher throughput and richer filtering + capability. + + Returns: + Callable[[~.ListServicesRequest], + Awaitable[~.ListServicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_services' not in self._stubs: + self._stubs['list_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/ListServices', + request_serializer=serviceusage.ListServicesRequest.serialize, + response_deserializer=serviceusage.ListServicesResponse.deserialize, + ) + return self._stubs['list_services'] + + @property + def batch_enable_services(self) -> Callable[ + [serviceusage.BatchEnableServicesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the batch enable services method over gRPC. + + Enable multiple services on a project. The operation is atomic: + if enabling any service fails, then the entire batch fails, and + no state changes occur. To enable a single service, use the + ``EnableService`` method instead. + + Returns: + Callable[[~.BatchEnableServicesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_enable_services' not in self._stubs: + self._stubs['batch_enable_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/BatchEnableServices', + request_serializer=serviceusage.BatchEnableServicesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['batch_enable_services'] + + @property + def batch_get_services(self) -> Callable[ + [serviceusage.BatchGetServicesRequest], + Awaitable[serviceusage.BatchGetServicesResponse]]: + r"""Return a callable for the batch get services method over gRPC. + + Returns the service configurations and enabled states + for a given list of services. 
+ + Returns: + Callable[[~.BatchGetServicesRequest], + Awaitable[~.BatchGetServicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_services' not in self._stubs: + self._stubs['batch_get_services'] = self.grpc_channel.unary_unary( + '/google.api.serviceusage.v1.ServiceUsage/BatchGetServices', + request_serializer=serviceusage.BatchGetServicesRequest.serialize, + response_deserializer=serviceusage.BatchGetServicesResponse.deserialize, + ) + return self._stubs['batch_get_services'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.enable_service: self._wrap_method( + self.enable_service, + default_timeout=None, + client_info=client_info, + ), + self.disable_service: self._wrap_method( + self.disable_service, + default_timeout=None, + client_info=client_info, + ), + self.get_service: self._wrap_method( + self.get_service, + default_timeout=None, + client_info=client_info, + ), + self.list_services: self._wrap_method( + self.list_services, + default_timeout=None, + client_info=client_info, + ), + self.batch_enable_services: self._wrap_method( + self.batch_enable_services, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_services: self._wrap_method( + self.batch_get_services, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER 
+ kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'ServiceUsageGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest.py new file mode 100644 index 000000000000..371381aecf3b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest.py @@ -0,0 +1,1040 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseServiceUsageRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class ServiceUsageRestInterceptor: + """Interceptor for ServiceUsage. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceUsageRestTransport. + + .. 
code-block:: python + class MyCustomServiceUsageInterceptor(ServiceUsageRestInterceptor): + def pre_batch_enable_services(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_enable_services(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_get_services(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_services(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_services(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_services(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceUsageRestTransport(interceptor=MyCustomServiceUsageInterceptor()) + client = ServiceUsageClient(transport=transport) + + + """ + def pre_batch_enable_services(self, request: serviceusage.BatchEnableServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.BatchEnableServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_enable_services + + Override in a subclass to manipulate the request or metadata + 
before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_batch_enable_services(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_enable_services + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_batch_get_services(self, request: serviceusage.BatchGetServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.BatchGetServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_get_services + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_batch_get_services(self, response: serviceusage.BatchGetServicesResponse) -> serviceusage.BatchGetServicesResponse: + """Post-rpc interceptor for batch_get_services + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_disable_service(self, request: serviceusage.DisableServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.DisableServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for disable_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_disable_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. 
+ """ + return response + + def pre_enable_service(self, request: serviceusage.EnableServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.EnableServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enable_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_enable_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_get_service(self, request: serviceusage.GetServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.GetServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_get_service(self, response: resources.Service) -> resources.Service: + """Post-rpc interceptor for get_service + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_list_services(self, request: serviceusage.ListServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[serviceusage.ListServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_services + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. 
+ """ + return request, metadata + + def post_list_services(self, response: serviceusage.ListServicesResponse) -> serviceusage.ListServicesResponse: + """Post-rpc interceptor for list_services + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceUsage server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ServiceUsage server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class ServiceUsageRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceUsageRestInterceptor + + +class ServiceUsageRestTransport(_BaseServiceUsageRestTransport): + """REST backend synchronous transport for ServiceUsage. + + Enables services that service consumers want to use on Google Cloud + Platform, lists the available or enabled services, or disables + services that service consumers no longer use. + + See `Service Usage + API `__ + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'serviceusage.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ServiceUsageRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'serviceusage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServiceUsageRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _BatchEnableServices(_BaseServiceUsageRestTransport._BaseBatchEnableServices, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.BatchEnableServices") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: serviceusage.BatchEnableServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the batch enable services method over HTTP. + + Args: + request (~.serviceusage.BatchEnableServicesRequest): + The request object. Request message for the ``BatchEnableServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseServiceUsageRestTransport._BaseBatchEnableServices._get_http_options() + request, metadata = self._interceptor.pre_batch_enable_services(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseBatchEnableServices._get_transcoded_request(http_options, request) + + body = _BaseServiceUsageRestTransport._BaseBatchEnableServices._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseBatchEnableServices._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._BatchEnableServices._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_enable_services(resp) + return resp + + class _BatchGetServices(_BaseServiceUsageRestTransport._BaseBatchGetServices, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.BatchGetServices") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: serviceusage.BatchGetServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> serviceusage.BatchGetServicesResponse: + r"""Call the batch get services method over HTTP. + + Args: + request (~.serviceusage.BatchGetServicesRequest): + The request object. Request message for the ``BatchGetServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.serviceusage.BatchGetServicesResponse: + Response message for the ``BatchGetServices`` method. 
+ """ + + http_options = _BaseServiceUsageRestTransport._BaseBatchGetServices._get_http_options() + request, metadata = self._interceptor.pre_batch_get_services(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseBatchGetServices._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseBatchGetServices._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._BatchGetServices._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = serviceusage.BatchGetServicesResponse() + pb_resp = serviceusage.BatchGetServicesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_services(resp) + return resp + + class _DisableService(_BaseServiceUsageRestTransport._BaseDisableService, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.DisableService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: serviceusage.DisableServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> 
operations_pb2.Operation: + r"""Call the disable service method over HTTP. + + Args: + request (~.serviceusage.DisableServiceRequest): + The request object. Request message for the ``DisableService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseServiceUsageRestTransport._BaseDisableService._get_http_options() + request, metadata = self._interceptor.pre_disable_service(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseDisableService._get_transcoded_request(http_options, request) + + body = _BaseServiceUsageRestTransport._BaseDisableService._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseDisableService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._DisableService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_service(resp) + return resp + + class _EnableService(_BaseServiceUsageRestTransport._BaseEnableService, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.EnableService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: serviceusage.EnableServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the enable service method over HTTP. + + Args: + request (~.serviceusage.EnableServiceRequest): + The request object. Request message for the ``EnableService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseServiceUsageRestTransport._BaseEnableService._get_http_options() + request, metadata = self._interceptor.pre_enable_service(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseEnableService._get_transcoded_request(http_options, request) + + body = _BaseServiceUsageRestTransport._BaseEnableService._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseEnableService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._EnableService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_service(resp) + return resp + + class _GetService(_BaseServiceUsageRestTransport._BaseGetService, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.GetService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: serviceusage.GetServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> resources.Service: + 
r"""Call the get service method over HTTP. + + Args: + request (~.serviceusage.GetServiceRequest): + The request object. Request message for the ``GetService`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Service: + A service that is available for use + by the consumer. + + """ + + http_options = _BaseServiceUsageRestTransport._BaseGetService._get_http_options() + request, metadata = self._interceptor.pre_get_service(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseGetService._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseGetService._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._GetService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Service() + pb_resp = resources.Service.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_service(resp) + return resp + + class _ListServices(_BaseServiceUsageRestTransport._BaseListServices, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.ListServices") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: serviceusage.ListServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> serviceusage.ListServicesResponse: + r"""Call the list services method over HTTP. + + Args: + request (~.serviceusage.ListServicesRequest): + The request object. Request message for the ``ListServices`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.serviceusage.ListServicesResponse: + Response message for the ``ListServices`` method. 
+ """ + + http_options = _BaseServiceUsageRestTransport._BaseListServices._get_http_options() + request, metadata = self._interceptor.pre_list_services(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseListServices._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseListServices._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._ListServices._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = serviceusage.ListServicesResponse() + pb_resp = serviceusage.ListServicesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_services(resp) + return resp + + @property + def batch_enable_services(self) -> Callable[ + [serviceusage.BatchEnableServicesRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEnableServices(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_get_services(self) -> Callable[ + [serviceusage.BatchGetServicesRequest], + serviceusage.BatchGetServicesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchGetServices(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_service(self) -> Callable[ + [serviceusage.DisableServiceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableService(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_service(self) -> Callable[ + [serviceusage.EnableServiceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableService(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_service(self) -> Callable[ + [serviceusage.GetServiceRequest], + resources.Service]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetService(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_services(self) -> Callable[ + [serviceusage.ListServicesRequest], + serviceusage.ListServicesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListServices(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseServiceUsageRestTransport._BaseGetOperation, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseServiceUsageRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseServiceUsageRestTransport._BaseListOperations, ServiceUsageRestStub): + def __hash__(self): + return hash("ServiceUsageRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> 
operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseServiceUsageRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseServiceUsageRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseServiceUsageRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + # Send the request + response = ServiceUsageRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ServiceUsageRestTransport', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest_base.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest_base.py new file mode 100644 index 000000000000..27235ab263a1 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/services/service_usage/transports/rest_base.py @@ -0,0 +1,347 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import json  # type: ignore
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from .base import ServiceUsageTransport, DEFAULT_CLIENT_INFO

import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union


from google.cloud.service_usage_v1.types import resources
from google.cloud.service_usage_v1.types import serviceusage
from google.longrunning import operations_pb2  # type: ignore


class _BaseServiceUsageRestTransport(ServiceUsageTransport):
    """Base REST backend transport for ServiceUsage.

    Note: This class is not meant to be used directly. Use its sync and
    async sub-classes instead.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1
    """

    def __init__(self, *,
            host: str = 'serviceusage.googleapis.com',
            credentials: Optional[Any] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            url_scheme: str = 'https',
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'serviceusage.googleapis.com').
            credentials (Optional[Any]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you are developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            url_scheme: the protocol scheme for the API endpoint.  Normally
                "https", but for testing or local servers,
                "http" can be specified.
        """
        # Run the base constructor
        # FIX: the two named groups had lost their names ("(?P..." is a
        # regex syntax error); the names are required because
        # groupdict()["scheme"] is read below to decide whether the caller
        # already supplied a protocol scheme.
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER

        url_match_items = maybe_url_match.groupdict()

        # Prepend the default scheme only when the host did not include one.
        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

        super().__init__(
            host=host,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience
        )

    class _BaseBatchEnableServices:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.BatchEnableServices.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{parent=*/*}/services:batchEnable',
                'body': '*',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.BatchEnableServicesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_request_body_json(transcoded_request):
            # Jsonify the request body
            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            return body

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseBatchGetServices:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.BatchGetServices (GET: no body).
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{parent=*/*}/services:batchGet',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.BatchGetServicesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseDisableService:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.DisableService.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{name=*/*/services/*}:disable',
                'body': '*',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.DisableServiceRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_request_body_json(transcoded_request):
            # Jsonify the request body
            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            return body

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseEnableService:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.EnableService.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{name=*/*/services/*}:enable',
                'body': '*',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.EnableServiceRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_request_body_json(transcoded_request):
            # Jsonify the request body
            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            return body

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseGetService:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.GetService (GET: no body).
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=*/*/services/*}',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.GetServiceRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseListServices:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # HTTP rule bound to ServiceUsage.ListServices (GET: no body).
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{parent=*/*}/services',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = serviceusage.ListServicesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseGetOperation:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # Mixin operation (google.longrunning); transcoded from a plain
            # dict rather than a proto-plus wrapper, hence MessageToDict below.
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=operations/*}',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            request_kwargs = json_format.MessageToDict(request)
            transcoded_request = path_template.transcode(
                http_options, **request_kwargs)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json.dumps(transcoded_request['query_params']))
            return query_params

    class _BaseListOperations:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            # Mixin operation (google.longrunning); see _BaseGetOperation.
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/operations',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            request_kwargs = json_format.MessageToDict(request)
            transcoded_request = path_template.transcode(
                http_options, **request_kwargs)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json.dumps(transcoded_request['query_params']))
            return query_params


__all__=(
    '_BaseServiceUsageRestTransport',
)
b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/__init__.py new file mode 100644 index 000000000000..0382be309f5b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + OperationMetadata, + Service, + ServiceConfig, + State, +) +from .serviceusage import ( + BatchEnableServicesRequest, + BatchEnableServicesResponse, + BatchGetServicesRequest, + BatchGetServicesResponse, + DisableServiceRequest, + DisableServiceResponse, + EnableServiceRequest, + EnableServiceResponse, + GetServiceRequest, + ListServicesRequest, + ListServicesResponse, +) + +__all__ = ( + 'OperationMetadata', + 'Service', + 'ServiceConfig', + 'State', + 'BatchEnableServicesRequest', + 'BatchEnableServicesResponse', + 'BatchGetServicesRequest', + 'BatchGetServicesResponse', + 'DisableServiceRequest', + 'DisableServiceResponse', + 'EnableServiceRequest', + 'EnableServiceResponse', + 'GetServiceRequest', + 'ListServicesRequest', + 'ListServicesResponse', +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/resources.py b/owl-bot-staging/google-cloud-service-usage/v1/google/cloud/service_usage_v1/types/resources.py new file mode 100644 index 000000000000..5f6681b5d245 --- /dev/null +++ 
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Proto-plus resource messages for the Service Usage v1 API.
from __future__ import annotations

from typing import MutableMapping, MutableSequence

import proto  # type: ignore

from google.api import auth_pb2  # type: ignore
from google.api import documentation_pb2  # type: ignore
from google.api import endpoint_pb2  # type: ignore
from google.api import monitored_resource_pb2  # type: ignore
from google.api import monitoring_pb2  # type: ignore
from google.api import quota_pb2  # type: ignore
from google.api import usage_pb2  # type: ignore
from google.protobuf import api_pb2  # type: ignore


# Registers every message/enum below under the API's proto package so the
# wire format matches google.api.serviceusage.v1.
__protobuf__ = proto.module(
    package='google.api.serviceusage.v1',
    manifest={
        'State',
        'Service',
        'ServiceConfig',
        'OperationMetadata',
    },
)


class State(proto.Enum):
    r"""Whether or not a service has been enabled for use by a
    consumer.

    Values:
        STATE_UNSPECIFIED (0):
            The default value, which indicates that the
            enabled state of the service is unspecified or
            not meaningful. Currently, all consumers other
            than projects (such as folders and
            organizations) are always in this state.
        DISABLED (1):
            The service cannot be used by this consumer.
            It has either been explicitly disabled, or has
            never been enabled.
        ENABLED (2):
            The service has been explicitly enabled for
            use by this consumer.
    """
    STATE_UNSPECIFIED = 0
    DISABLED = 1
    ENABLED = 2


class Service(proto.Message):
    r"""A service that is available for use by the consumer.

    Attributes:
        name (str):
            The resource name of the consumer and service.

            A valid name would be:
            ``projects/123/services/serviceusage.googleapis.com``
        parent (str):
            The resource name of the consumer.

            A valid name would be: ``projects/123``
        config (google.cloud.service_usage_v1.types.ServiceConfig):
            The service configuration of the available service. Some
            fields may be filtered out of the configuration in responses
            to the ``ListServices`` method. These fields are present
            only in responses to the ``GetService`` method.
        state (google.cloud.service_usage_v1.types.State):
            Whether or not the service has been enabled
            for use by the consumer.
    """

    # NOTE: field numbers are part of the wire contract and are not
    # contiguous by design (they mirror the upstream proto).
    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    parent: str = proto.Field(
        proto.STRING,
        number=5,
    )
    config: 'ServiceConfig' = proto.Field(
        proto.MESSAGE,
        number=2,
        message='ServiceConfig',
    )
    state: 'State' = proto.Field(
        proto.ENUM,
        number=4,
        enum='State',
    )


class ServiceConfig(proto.Message):
    r"""The configuration of the service.

    Attributes:
        name (str):
            The DNS address at which this service is available.

            An example DNS address would be:
            ``calendar.googleapis.com``.
        title (str):
            The product title for this service.
        apis (MutableSequence[google.protobuf.api_pb2.Api]):
            A list of API interfaces exported by this
            service. Contains only the names, versions, and
            method names of the interfaces.
        documentation (google.api.documentation_pb2.Documentation):
            Additional API documentation. Contains only
            the summary and the documentation URL.
        quota (google.api.quota_pb2.Quota):
            Quota configuration.
        authentication (google.api.auth_pb2.Authentication):
            Auth configuration. Contains only the OAuth
            rules.
        usage (google.api.usage_pb2.Usage):
            Configuration controlling usage of this
            service.
        endpoints (MutableSequence[google.api.endpoint_pb2.Endpoint]):
            Configuration for network endpoints. Contains
            only the names and aliases of the endpoints.
        monitored_resources (MutableSequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]):
            Defines the monitored resources used by this service. This
            is required by the
            [Service.monitoring][google.api.Service.monitoring] and
            [Service.logging][google.api.Service.logging]
            configurations.
        monitoring (google.api.monitoring_pb2.Monitoring):
            Monitoring configuration. This should not include the
            'producer_destinations' field.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    title: str = proto.Field(
        proto.STRING,
        number=2,
    )
    apis: MutableSequence[api_pb2.Api] = proto.RepeatedField(
        proto.MESSAGE,
        number=3,
        message=api_pb2.Api,
    )
    documentation: documentation_pb2.Documentation = proto.Field(
        proto.MESSAGE,
        number=6,
        message=documentation_pb2.Documentation,
    )
    quota: quota_pb2.Quota = proto.Field(
        proto.MESSAGE,
        number=10,
        message=quota_pb2.Quota,
    )
    authentication: auth_pb2.Authentication = proto.Field(
        proto.MESSAGE,
        number=11,
        message=auth_pb2.Authentication,
    )
    usage: usage_pb2.Usage = proto.Field(
        proto.MESSAGE,
        number=15,
        message=usage_pb2.Usage,
    )
    endpoints: MutableSequence[endpoint_pb2.Endpoint] = proto.RepeatedField(
        proto.MESSAGE,
        number=18,
        message=endpoint_pb2.Endpoint,
    )
    monitored_resources: MutableSequence[monitored_resource_pb2.MonitoredResourceDescriptor] = proto.RepeatedField(
        proto.MESSAGE,
        number=25,
        message=monitored_resource_pb2.MonitoredResourceDescriptor,
    )
    monitoring: monitoring_pb2.Monitoring = proto.Field(
        proto.MESSAGE,
        number=28,
        message=monitoring_pb2.Monitoring,
    )


class OperationMetadata(proto.Message):
    r"""The operation metadata returned for the batchend services
    operation.

    Attributes:
        resource_names (MutableSequence[str]):
            The full name of the resources that this
            operation is directly associated with.
    """

    resource_names: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=2,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
#
# Proto-plus request/response messages for the Service Usage v1 RPCs.
from __future__ import annotations

from typing import MutableMapping, MutableSequence

import proto  # type: ignore

from google.cloud.service_usage_v1.types import resources


# Registers every message below under the API's proto package so the wire
# format matches google.api.serviceusage.v1.
__protobuf__ = proto.module(
    package='google.api.serviceusage.v1',
    manifest={
        'EnableServiceRequest',
        'EnableServiceResponse',
        'DisableServiceRequest',
        'DisableServiceResponse',
        'GetServiceRequest',
        'ListServicesRequest',
        'ListServicesResponse',
        'BatchEnableServicesRequest',
        'BatchEnableServicesResponse',
        'BatchGetServicesRequest',
        'BatchGetServicesResponse',
    },
)


class EnableServiceRequest(proto.Message):
    r"""Request message for the ``EnableService`` method.

    Attributes:
        name (str):
            Name of the consumer and service to enable the service on.

            The ``EnableService`` and ``DisableService`` methods
            currently only support projects.

            Enabling a service requires that the service is public or is
            shared with the user enabling the service.

            An example name would be:
            ``projects/123/services/serviceusage.googleapis.com`` where
            ``123`` is the project number.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class EnableServiceResponse(proto.Message):
    r"""Response message for the ``EnableService`` method. This response
    message is assigned to the ``response`` field of the returned
    Operation when that operation is done.

    Attributes:
        service (google.cloud.service_usage_v1.types.Service):
            The new state of the service after enabling.
    """

    service: resources.Service = proto.Field(
        proto.MESSAGE,
        number=1,
        message=resources.Service,
    )


class DisableServiceRequest(proto.Message):
    r"""Request message for the ``DisableService`` method.

    Attributes:
        name (str):
            Name of the consumer and service to disable the service on.

            The enable and disable methods currently only support
            projects.

            An example name would be:
            ``projects/123/services/serviceusage.googleapis.com`` where
            ``123`` is the project number.
        disable_dependent_services (bool):
            Indicates if services that are enabled and
            which depend on this service should also be
            disabled. If not set, an error will be generated
            if any enabled services depend on the service to
            be disabled. When set, the service, and any
            enabled services that depend on it, will be
            disabled together.
        check_if_service_has_usage (google.cloud.service_usage_v1.types.DisableServiceRequest.CheckIfServiceHasUsage):
            Defines the behavior for checking service
            usage when disabling a service.
    """
    class CheckIfServiceHasUsage(proto.Enum):
        r"""Enum to determine if service usage should be checked when
        disabling a service.

        Values:
            CHECK_IF_SERVICE_HAS_USAGE_UNSPECIFIED (0):
                When unset, the default behavior is used,
                which is SKIP.
            SKIP (1):
                If set, skip checking service usage when
                disabling a service.
            CHECK (2):
                If set, service usage is checked when disabling the service.
                If a service, or its dependents, has usage in the last 30
                days, the request returns a FAILED_PRECONDITION error.
        """
        CHECK_IF_SERVICE_HAS_USAGE_UNSPECIFIED = 0
        SKIP = 1
        CHECK = 2

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    disable_dependent_services: bool = proto.Field(
        proto.BOOL,
        number=2,
    )
    check_if_service_has_usage: CheckIfServiceHasUsage = proto.Field(
        proto.ENUM,
        number=3,
        enum=CheckIfServiceHasUsage,
    )


class DisableServiceResponse(proto.Message):
    r"""Response message for the ``DisableService`` method. This response
    message is assigned to the ``response`` field of the returned
    Operation when that operation is done.

    Attributes:
        service (google.cloud.service_usage_v1.types.Service):
            The new state of the service after disabling.
    """

    service: resources.Service = proto.Field(
        proto.MESSAGE,
        number=1,
        message=resources.Service,
    )


class GetServiceRequest(proto.Message):
    r"""Request message for the ``GetService`` method.

    Attributes:
        name (str):
            Name of the consumer and service to get the
            ``ConsumerState`` for.

            An example name would be:
            ``projects/123/services/serviceusage.googleapis.com`` where
            ``123`` is the project number.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class ListServicesRequest(proto.Message):
    r"""Request message for the ``ListServices`` method.

    Attributes:
        parent (str):
            Parent to search for services on.

            An example name would be: ``projects/123`` where ``123`` is
            the project number.
        page_size (int):
            Requested size of the next page of data.
            Requested page size cannot exceed 200.
            If not set, the default page size is 50.
        page_token (str):
            Token identifying which result to start with,
            which is returned by a previous list call.
        filter (str):
            Only list services that conform to the given filter. The
            allowed filter strings are ``state:ENABLED`` and
            ``state:DISABLED``.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size: int = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token: str = proto.Field(
        proto.STRING,
        number=3,
    )
    filter: str = proto.Field(
        proto.STRING,
        number=4,
    )


class ListServicesResponse(proto.Message):
    r"""Response message for the ``ListServices`` method.

    Attributes:
        services (MutableSequence[google.cloud.service_usage_v1.types.Service]):
            The available services for the requested
            project.
        next_page_token (str):
            Token that can be passed to ``ListServices`` to resume a
            paginated query.
    """

    @property
    def raw_page(self):
        # Pager protocol hook: the pagers treat this message itself as the
        # raw page of results.
        return self

    services: MutableSequence[resources.Service] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.Service,
    )
    next_page_token: str = proto.Field(
        proto.STRING,
        number=2,
    )


class BatchEnableServicesRequest(proto.Message):
    r"""Request message for the ``BatchEnableServices`` method.

    Attributes:
        parent (str):
            Parent to enable services on.

            An example name would be: ``projects/123`` where ``123`` is
            the project number.

            The ``BatchEnableServices`` method currently only supports
            projects.
        service_ids (MutableSequence[str]):
            The identifiers of the services to enable on
            the project.

            A valid identifier would be:
            serviceusage.googleapis.com

            Enabling services requires that each service is
            public or is shared with the user enabling the
            service.

            A single request can enable a maximum of 20
            services at a time. If more than 20 services are
            specified, the request will fail, and no state
            changes will occur.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    service_ids: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=2,
    )


class BatchEnableServicesResponse(proto.Message):
    r"""Response message for the ``BatchEnableServices`` method. This
    response message is assigned to the ``response`` field of the
    returned Operation when that operation is done.

    Attributes:
        services (MutableSequence[google.cloud.service_usage_v1.types.Service]):
            The new state of the services after enabling.
        failures (MutableSequence[google.cloud.service_usage_v1.types.BatchEnableServicesResponse.EnableFailure]):
            If allow_partial_success is true, and one or more services
            could not be enabled, this field contains the details about
            each failure.
    """

    class EnableFailure(proto.Message):
        r"""Provides error messages for the failing services.

        Attributes:
            service_id (str):
                The service id of a service that could not be
                enabled.
            error_message (str):
                An error message describing why the service
                could not be enabled.
        """

        service_id: str = proto.Field(
            proto.STRING,
            number=1,
        )
        error_message: str = proto.Field(
            proto.STRING,
            number=2,
        )

    services: MutableSequence[resources.Service] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.Service,
    )
    failures: MutableSequence[EnableFailure] = proto.RepeatedField(
        proto.MESSAGE,
        number=2,
        message=EnableFailure,
    )


class BatchGetServicesRequest(proto.Message):
    r"""Request message for the ``BatchGetServices`` method.

    Attributes:
        parent (str):
            Parent to retrieve services from. If this is set, the parent
            of all of the services specified in ``names`` must match
            this field. An example name would be: ``projects/123`` where
            ``123`` is the project number. The ``BatchGetServices``
            method currently only supports projects.
        names (MutableSequence[str]):
            Names of the services to retrieve.

            An example name would be:
            ``projects/123/services/serviceusage.googleapis.com`` where
            ``123`` is the project number. A single request can get a
            maximum of 30 services at a time.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    names: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=2,
    )


class BatchGetServicesResponse(proto.Message):
    r"""Response message for the ``BatchGetServices`` method.

    Attributes:
        services (MutableSequence[google.cloud.service_usage_v1.types.Service]):
            The requested Service states.
    """

    services: MutableSequence[resources.Service] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.Service,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
#
# Nox configuration for the generated google-cloud-service-usage package:
# unit tests (per protobuf implementation), coverage, typing, lower-bound
# checks, docs, and formatting/lint sessions.
import os
import pathlib
import re
import shutil
import subprocess
import sys


import nox  # type: ignore

# Python runtimes the unit tests run against.
ALL_PYTHON = [
    "3.7",
    "3.8",
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "3.13",
]

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
PACKAGE_NAME = 'google-cloud-service-usage'

BLACK_VERSION = "black==22.3.0"
BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.13"

# NOTE(review): nox itself reads ``nox.options.sessions``; this module-level
# assignment looks inert — confirm against the nox version in use.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    # FIX: a missing trailing comma here used to concatenate the two string
    # literals into the bogus session name "check_lower_boundsdocs".
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
    "blacken",
    "lint",
    "prerelease_deps",
]

@nox.session(python=ALL_PYTHON)
@nox.parametrize(
    "protobuf_implementation",
    [ "python", "upb", "cpp" ],
)
def unit(session, protobuf_implementation):
    """Run the unit test suite."""

    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
        session.skip("cpp implementation is not supported in python 3.11+")

    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")

    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
    # The 'cpp' implementation requires Protobuf<4.
    if protobuf_implementation == "cpp":
        session.install("protobuf<4")

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/service_usage_v1/',
        '--cov=tests/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
        env={
            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
        },
    )

@nox.session(python=ALL_PYTHON[-1])
@nox.parametrize(
    "protobuf_implementation",
    [ "python", "upb", "cpp" ],
)
def prerelease_deps(session, protobuf_implementation):
    """Run the unit test suite against pre-release versions of dependencies."""

    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
        session.skip("cpp implementation is not supported in python 3.11+")

    # Install test environment dependencies
    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')

    # Install the package without dependencies
    session.install('-e', '.', '--no-deps')

    # We test the minimum dependency versions using the minimum Python
    # version so the lowest python runtime that we test has a corresponding constraints
    # file, located at `testing/constraints-{ALL_PYTHON[0]}.txt`, which contains all of the
    # dependencies and extras.
    with open(
        CURRENT_DIRECTORY
        / "testing"
        / f"constraints-{ALL_PYTHON[0]}.txt",
        encoding="utf-8",
    ) as constraints_file:
        constraints_text = constraints_file.read()

    # Ignore leading whitespace and comment lines.
    constraints_deps = [
        match.group(1)
        for match in re.finditer(
            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
        )
    ]

    session.install(*constraints_deps)

    prerel_deps = [
        "googleapis-common-protos",
        "google-api-core",
        "google-auth",
        # Exclude grpcio!=1.67.0rc1 which does not support python 3.13
        "grpcio!=1.67.0rc1",
        "grpcio-status",
        "protobuf",
        "proto-plus",
    ]

    for dep in prerel_deps:
        session.install("--pre", "--no-deps", "--upgrade", dep)

    # Remaining dependencies
    other_deps = [
        "requests",
    ]
    session.install(*other_deps)

    # Print out prerelease package versions

    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
    session.run("python", "-c", "import grpc; print(grpc.__version__)")
    session.run(
        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
    )
    session.run(
        "python", "-c", "import proto; print(proto.__version__)"
    )

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/service_usage_v1/',
        '--cov=tests/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
        env={
            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
        },
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=ALL_PYTHON)
def mypy(session):
    """Run the type checker."""
    session.install(
        'mypy',
        'types-requests',
        'types-protobuf'
    )
    session.install('.')
    session.run(
        'mypy',
        '-p',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )

@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
    """Build the docs for this library."""

    session.install("-e", ".")
    session.install("sphinx==7.0.1", "alabaster", "recommonmark")

    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
    """Run linters.

    Returns a failure if the linters find linting errors or sufficiently
    serious code quality issues.
    """
    session.install("flake8", BLACK_VERSION)
    session.run(
        "black",
        "--check",
        *BLACK_PATHS,
    )
    session.run("flake8", "google", "tests", "samples")


@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
    """Run black. Format code to uniform standard."""
    session.install(BLACK_VERSION)
    session.run(
        "black",
        *BLACK_PATHS,
    )
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_BatchEnableServices_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_batch_enable_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchEnableServicesRequest( + ) + + # Make the request + operation = client.batch_enable_services(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_BatchEnableServices_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_enable_services_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_enable_services_sync.py new file mode 100644 index 000000000000..8763e3b2d53b --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_enable_services_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEnableServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_BatchEnableServices_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_batch_enable_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchEnableServicesRequest( + ) + + # Make the request + operation = client.batch_enable_services(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_BatchEnableServices_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_async.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_async.py new file mode 100644 index 000000000000..07020b420a53 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_BatchGetServices_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_batch_get_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchGetServicesRequest( + ) + + # Make the request + response = await client.batch_get_services(request=request) + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_BatchGetServices_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_sync.py new file mode 100644 index 000000000000..8d428ed21af9 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_batch_get_services_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_BatchGetServices_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_batch_get_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.BatchGetServicesRequest( + ) + + # Make the request + response = client.batch_get_services(request=request) + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_BatchGetServices_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_async.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_async.py new file mode 100644 index 000000000000..558fa7b526ad --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_DisableService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_disable_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.DisableServiceRequest( + ) + + # Make the request + operation = client.disable_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_DisableService_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_sync.py new file mode 100644 index 000000000000..7cceda21369a --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_disable_service_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_DisableService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_disable_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.DisableServiceRequest( + ) + + # Make the request + operation = client.disable_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_DisableService_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_async.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_async.py new file mode 100644 index 000000000000..0453b0e5c637 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableService +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_EnableService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_enable_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.EnableServiceRequest( + ) + + # Make the request + operation = client.enable_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_EnableService_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_sync.py new file mode 100644 index 000000000000..f50eb5bc7dbb --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_enable_service_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_EnableService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_enable_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.EnableServiceRequest( + ) + + # Make the request + operation = client.enable_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_EnableService_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_async.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_async.py new file mode 100644 index 000000000000..c04e45322a4d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetService +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_GetService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_get_service(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.GetServiceRequest( + ) + + # Make the request + response = await client.get_service(request=request) + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_GetService_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_sync.py new file mode 100644 index 000000000000..20c05e601fbc --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_get_service_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_GetService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_get_service(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.GetServiceRequest( + ) + + # Make the request + response = client.get_service(request=request) + + # Handle the response + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_GetService_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_async.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_async.py new file mode 100644 index 000000000000..688361f46f71 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_ListServices_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +async def sample_list_services(): + # Create a client + client = service_usage_v1.ServiceUsageAsyncClient() + + # Initialize request argument(s) + request = service_usage_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_ListServices_async] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_sync.py b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_sync.py new file mode 100644 index 000000000000..456b67138d49 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/serviceusage_v1_generated_service_usage_list_services_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServices +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-service-usage + + +# [START serviceusage_v1_generated_ServiceUsage_ListServices_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import service_usage_v1 + + +def sample_list_services(): + # Create a client + client = service_usage_v1.ServiceUsageClient() + + # Initialize request argument(s) + request = service_usage_v1.ListServicesRequest( + ) + + # Make the request + page_result = client.list_services(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END serviceusage_v1_generated_ServiceUsage_ListServices_sync] diff --git a/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json new file mode 100644 index 000000000000..4ff459eb433d --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/samples/generated_samples/snippet_metadata_google.api.serviceusage.v1.json @@ -0,0 +1,933 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.serviceusage.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-service-usage", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.batch_enable_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.BatchEnableServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "BatchEnableServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.BatchEnableServicesRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_enable_services" + }, + "description": "Sample for BatchEnableServices", + "file": "serviceusage_v1_generated_service_usage_batch_enable_services_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_BatchEnableServices_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_batch_enable_services_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.batch_enable_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.BatchEnableServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "BatchEnableServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.BatchEnableServicesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_enable_services" + }, + "description": "Sample for BatchEnableServices", + "file": 
"serviceusage_v1_generated_service_usage_batch_enable_services_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_BatchEnableServices_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_batch_enable_services_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.batch_get_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.BatchGetServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "BatchGetServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.BatchGetServicesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.types.BatchGetServicesResponse", + "shortName": "batch_get_services" + }, + "description": "Sample for BatchGetServices", + "file": "serviceusage_v1_generated_service_usage_batch_get_services_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_BatchGetServices_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_batch_get_services_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.batch_get_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.BatchGetServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "BatchGetServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.BatchGetServicesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.types.BatchGetServicesResponse", + "shortName": "batch_get_services" + }, + "description": "Sample for BatchGetServices", + "file": "serviceusage_v1_generated_service_usage_batch_get_services_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_BatchGetServices_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"serviceusage_v1_generated_service_usage_batch_get_services_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.disable_service", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.DisableService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "DisableService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.DisableServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "disable_service" + }, + "description": "Sample for DisableService", + "file": "serviceusage_v1_generated_service_usage_disable_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_DisableService_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_disable_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.disable_service", + "method": 
{ + "fullName": "google.api.serviceusage.v1.ServiceUsage.DisableService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "DisableService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.DisableServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "disable_service" + }, + "description": "Sample for DisableService", + "file": "serviceusage_v1_generated_service_usage_disable_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_DisableService_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_disable_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.enable_service", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.EnableService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "EnableService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.EnableServiceRequest" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "enable_service" + }, + "description": "Sample for EnableService", + "file": "serviceusage_v1_generated_service_usage_enable_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_EnableService_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_enable_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.enable_service", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.EnableService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "EnableService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.EnableServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "enable_service" + }, + "description": "Sample for EnableService", + "file": 
"serviceusage_v1_generated_service_usage_enable_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_EnableService_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_enable_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.get_service", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.GetService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "GetService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.GetServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.types.Service", + "shortName": "get_service" + }, + "description": "Sample for GetService", + "file": "serviceusage_v1_generated_service_usage_get_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_GetService_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_get_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.get_service", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.GetService", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "GetService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.GetServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.types.Service", + "shortName": "get_service" + }, + "description": "Sample for GetService", + "file": "serviceusage_v1_generated_service_usage_get_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_GetService_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_get_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.service_usage_v1.ServiceUsageAsyncClient", + "shortName": "ServiceUsageAsyncClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageAsyncClient.list_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.ListServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + "shortName": "ServiceUsage" + }, + "shortName": "ListServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.ListServicesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.services.service_usage.pagers.ListServicesAsyncPager", + "shortName": "list_services" + }, + "description": "Sample for ListServices", + "file": "serviceusage_v1_generated_service_usage_list_services_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_ListServices_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_list_services_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient", + "shortName": "ServiceUsageClient" + }, + "fullName": "google.cloud.service_usage_v1.ServiceUsageClient.list_services", + "method": { + "fullName": "google.api.serviceusage.v1.ServiceUsage.ListServices", + "service": { + "fullName": "google.api.serviceusage.v1.ServiceUsage", + 
"shortName": "ServiceUsage" + }, + "shortName": "ListServices" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.service_usage_v1.types.ListServicesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.service_usage_v1.services.service_usage.pagers.ListServicesPager", + "shortName": "list_services" + }, + "description": "Sample for ListServices", + "file": "serviceusage_v1_generated_service_usage_list_services_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "serviceusage_v1_generated_ServiceUsage_ListServices_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "serviceusage_v1_generated_service_usage_list_services_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-service-usage/v1/scripts/fixup_service_usage_v1_keywords.py b/owl-bot-staging/google-cloud-service-usage/v1/scripts/fixup_service_usage_v1_keywords.py new file mode 100644 index 000000000000..2867c0810f46 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/scripts/fixup_service_usage_v1_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class service_usageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_enable_services': ('parent', 'service_ids', ), + 'batch_get_services': ('parent', 'names', ), + 'disable_service': ('name', 'disable_dependent_services', 'check_if_service_has_usage', ), + 'enable_service': ('name', ), + 'get_service': ('name', ), + 'list_services': ('parent', 'page_size', 'page_token', 'filter', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=service_usageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the service_usage client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/setup.py b/owl-bot-staging/google-cloud-service-usage/v1/setup.py new file mode 100644 index 000000000000..fd64d0146043 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-service-usage' + + +description = "Google Cloud Service Usage API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/service_usage/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in 
setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-service-usage/v1/tests/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/__init__.py b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/test_service_usage.py b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/test_service_usage.py new file mode 100644 index 000000000000..a0f70582e552 --- /dev/null +++ b/owl-bot-staging/google-cloud-service-usage/v1/tests/unit/gapic/service_usage_v1/test_service_usage.py @@ -0,0 +1,4761 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.service_usage_v1.services.service_usage import ServiceUsageAsyncClient +from google.cloud.service_usage_v1.services.service_usage import ServiceUsageClient +from google.cloud.service_usage_v1.services.service_usage import pagers +from google.cloud.service_usage_v1.services.service_usage import transports +from google.cloud.service_usage_v1.types import resources +from google.cloud.service_usage_v1.types import serviceusage +from google.longrunning import operations_pb2 # type: 
ignore +from google.oauth2 import service_account +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceUsageClient._get_default_mtls_endpoint(None) is None + assert ServiceUsageClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ServiceUsageClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ServiceUsageClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ServiceUsageClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ServiceUsageClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert ServiceUsageClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ServiceUsageClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ServiceUsageClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + ServiceUsageClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ServiceUsageClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ServiceUsageClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ServiceUsageClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceUsageClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ServiceUsageClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ServiceUsageClient._get_client_cert_source(None, False) is None + assert ServiceUsageClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert ServiceUsageClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert ServiceUsageClient._get_client_cert_source(None, True) is mock_default_cert_source + assert ServiceUsageClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(ServiceUsageClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageClient)) +@mock.patch.object(ServiceUsageAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = 
ServiceUsageClient._DEFAULT_UNIVERSE + default_endpoint = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert ServiceUsageClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert ServiceUsageClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ServiceUsageClient.DEFAULT_MTLS_ENDPOINT + assert ServiceUsageClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert ServiceUsageClient._get_api_endpoint(None, None, default_universe, "always") == ServiceUsageClient.DEFAULT_MTLS_ENDPOINT + assert ServiceUsageClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ServiceUsageClient.DEFAULT_MTLS_ENDPOINT + assert ServiceUsageClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert ServiceUsageClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + ServiceUsageClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ServiceUsageClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ServiceUsageClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ServiceUsageClient._get_universe_domain(None, None) == ServiceUsageClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ServiceUsageClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceUsageClient, "grpc"), + (ServiceUsageAsyncClient, "grpc_asyncio"), + (ServiceUsageClient, "rest"), +]) +def test_service_usage_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'serviceusage.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://serviceusage.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceUsageGrpcTransport, "grpc"), + (transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ServiceUsageRestTransport, "rest"), +]) +def test_service_usage_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceUsageClient, "grpc"), + (ServiceUsageAsyncClient, "grpc_asyncio"), + (ServiceUsageClient, "rest"), +]) +def test_service_usage_client_from_service_account_file(client_class, transport_name): + creds = 
ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'serviceusage.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://serviceusage.googleapis.com' + ) + + +def test_service_usage_client_get_transport_class(): + transport = ServiceUsageClient.get_transport_class() + available_transports = [ + transports.ServiceUsageGrpcTransport, + transports.ServiceUsageRestTransport, + ] + assert transport in available_transports + + transport = ServiceUsageClient.get_transport_class("grpc") + assert transport == transports.ServiceUsageGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc"), + (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceUsageClient, transports.ServiceUsageRestTransport, "rest"), +]) +@mock.patch.object(ServiceUsageClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageClient)) +@mock.patch.object(ServiceUsageAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageAsyncClient)) +def test_service_usage_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(ServiceUsageClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceUsageClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", "true"), + (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", "false"), + 
(ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ServiceUsageClient, transports.ServiceUsageRestTransport, "rest", "true"), + (ServiceUsageClient, transports.ServiceUsageRestTransport, "rest", "false"), +]) +@mock.patch.object(ServiceUsageClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageClient)) +@mock.patch.object(ServiceUsageAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_usage_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ServiceUsageClient, ServiceUsageAsyncClient +]) +@mock.patch.object(ServiceUsageClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageClient)) +@mock.patch.object(ServiceUsageAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageAsyncClient)) +def test_service_usage_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ServiceUsageClient, ServiceUsageAsyncClient +]) +@mock.patch.object(ServiceUsageClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageClient)) +@mock.patch.object(ServiceUsageAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ServiceUsageAsyncClient)) +def test_service_usage_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ServiceUsageClient._DEFAULT_UNIVERSE + default_endpoint = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + 
mock_endpoint = ServiceUsageClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc"), + (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"), + (ServiceUsageClient, transports.ServiceUsageRestTransport, "rest"), +]) +def test_service_usage_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", grpc_helpers), + (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ServiceUsageClient, transports.ServiceUsageRestTransport, "rest", None), +]) +def test_service_usage_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_service_usage_client_client_options_from_dict(): + with mock.patch('google.cloud.service_usage_v1.services.service_usage.transports.ServiceUsageGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ServiceUsageClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", grpc_helpers), + (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_service_usage_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "serviceusage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', +), + scopes=None, + default_host="serviceusage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.EnableServiceRequest, + dict, +]) +def test_enable_service(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.EnableServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_enable_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.EnableServiceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.enable_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.EnableServiceRequest( + name='name_value', + ) + +def test_enable_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.enable_service] = mock_rpc + request = {} + client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_enable_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.enable_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.enable_service] = mock_rpc + + request = {} + await client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.enable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_enable_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.EnableServiceRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.EnableServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_enable_service_async_from_dict(): + await test_enable_service_async(request_type=dict) + +def test_enable_service_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.EnableServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_enable_service_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.EnableServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + serviceusage.DisableServiceRequest, + dict, +]) +def test_disable_service(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.DisableServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_disable_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.DisableServiceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.disable_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.DisableServiceRequest( + name='name_value', + ) + +def test_disable_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.disable_service] = mock_rpc + request = {} + client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_disable_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.disable_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.disable_service] = mock_rpc + + request = {} + await client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.disable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_disable_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.DisableServiceRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.DisableServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_disable_service_async_from_dict(): + await test_disable_service_async(request_type=dict) + +def test_disable_service_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.DisableServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_disable_service_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.DisableServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + serviceusage.GetServiceRequest, + dict, +]) +def test_get_service(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Service( + name='name_value', + parent='parent_value', + state=resources.State.DISABLED, + ) + response = client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.GetServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Service) + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.state == resources.State.DISABLED + + +def test_get_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.GetServiceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.GetServiceRequest( + name='name_value', + ) + +def test_get_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service] = mock_rpc + request = {} + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_service in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_service] = mock_rpc + + request = {} + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.GetServiceRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Service( + name='name_value', + parent='parent_value', + state=resources.State.DISABLED, + )) + response = await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.GetServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Service) + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.state == resources.State.DISABLED + + +@pytest.mark.asyncio +async def test_get_service_async_from_dict(): + await test_get_service_async(request_type=dict) + +def test_get_service_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.GetServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = resources.Service() + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_service_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.GetServiceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Service()) + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + serviceusage.ListServicesRequest, + dict, +]) +def test_list_services(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = serviceusage.ListServicesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.ListServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_services_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.ListServicesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_services(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.ListServicesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_services_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_services] = mock_rpc + request = {} + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_services_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_services in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_services] = mock_rpc + + request = {} + await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_services_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ListServicesRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListServicesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.ListServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_services_async_from_dict(): + await test_list_services_async(request_type=dict) + +def test_list_services_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.ListServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = serviceusage.ListServicesResponse() + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_services_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.ListServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListServicesResponse()) + await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_services_pager(transport_name: str = "grpc"): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + resources.Service(), + ], + next_page_token='abc', + ), + serviceusage.ListServicesResponse( + services=[], + next_page_token='def', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + ], + next_page_token='ghi', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_services(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Service) + for i in results) +def test_list_services_pages(transport_name: str = "grpc"): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + resources.Service(), + ], + next_page_token='abc', + ), + serviceusage.ListServicesResponse( + services=[], + next_page_token='def', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + ], + next_page_token='ghi', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + ], + ), + RuntimeError, + ) + pages = list(client.list_services(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_services_async_pager(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + resources.Service(), + ], + next_page_token='abc', + ), + serviceusage.ListServicesResponse( + services=[], + next_page_token='def', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + ], + next_page_token='ghi', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_services(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Service) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_services_async_pages(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + resources.Service(), + ], + next_page_token='abc', + ), + serviceusage.ListServicesResponse( + services=[], + next_page_token='def', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + ], + next_page_token='ghi', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_services(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + serviceusage.BatchEnableServicesRequest, + dict, +]) +def test_batch_enable_services(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.BatchEnableServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_enable_services_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.BatchEnableServicesRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.batch_enable_services(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.BatchEnableServicesRequest( + parent='parent_value', + ) + +def test_batch_enable_services_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_enable_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_enable_services] = mock_rpc + request = {} + client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.batch_enable_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_enable_services_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_enable_services in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_enable_services] = mock_rpc + + request = {} + await client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.batch_enable_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_enable_services_async(transport: str = 'grpc_asyncio', request_type=serviceusage.BatchEnableServicesRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.BatchEnableServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_enable_services_async_from_dict(): + await test_batch_enable_services_async(request_type=dict) + +def test_batch_enable_services_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.BatchEnableServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_enable_services_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.BatchEnableServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + serviceusage.BatchGetServicesRequest, + dict, +]) +def test_batch_get_services(request_type, transport: str = 'grpc'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = serviceusage.BatchGetServicesResponse( + ) + response = client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = serviceusage.BatchGetServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, serviceusage.BatchGetServicesResponse) + + +def test_batch_get_services_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = serviceusage.BatchGetServicesRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.batch_get_services(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serviceusage.BatchGetServicesRequest( + parent='parent_value', + ) + +def test_batch_get_services_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_get_services] = mock_rpc + request = {} + client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_get_services_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_get_services in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_get_services] = mock_rpc + + request = {} + await client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_get_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_get_services_async(transport: str = 'grpc_asyncio', request_type=serviceusage.BatchGetServicesRequest): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.BatchGetServicesResponse( + )) + response = await client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = serviceusage.BatchGetServicesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, serviceusage.BatchGetServicesResponse) + + +@pytest.mark.asyncio +async def test_batch_get_services_async_from_dict(): + await test_batch_get_services_async(request_type=dict) + +def test_batch_get_services_field_headers(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serviceusage.BatchGetServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + call.return_value = serviceusage.BatchGetServicesResponse() + client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_get_services_field_headers_async(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = serviceusage.BatchGetServicesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.BatchGetServicesResponse()) + await client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_enable_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.enable_service] = mock_rpc + + request = {} + client.enable_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_disable_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.disable_service] = mock_rpc + + request = {} + client.disable_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_service in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_service] = mock_rpc + + request = {} + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_services_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_services] = mock_rpc + + request = {} + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_services_rest_pager(transport: str = 'rest'): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + resources.Service(), + ], + next_page_token='abc', + ), + serviceusage.ListServicesResponse( + services=[], + next_page_token='def', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + ], + next_page_token='ghi', + ), + serviceusage.ListServicesResponse( + services=[ + resources.Service(), + resources.Service(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(serviceusage.ListServicesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_services(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Service) + for i in results) + + pages = list(client.list_services(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_batch_enable_services_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_enable_services in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_enable_services] = mock_rpc + + request = {} + client.batch_enable_services(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.batch_enable_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_get_services_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_services in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_get_services] = mock_rpc + + request = {} + client.batch_get_services(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_services(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceUsageClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceUsageClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceUsageClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceUsageClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceUsageClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceUsageGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceUsageGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceUsageGrpcTransport, + transports.ServiceUsageGrpcAsyncIOTransport, + transports.ServiceUsageRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ServiceUsageClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_enable_service_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.enable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.EnableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_disable_service_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.disable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.DisableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_service_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + call.return_value = resources.Service() + client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_services_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + call.return_value = serviceusage.ListServicesResponse() + client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_enable_services_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.batch_enable_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchEnableServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_services_empty_call_grpc(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + call.return_value = serviceusage.BatchGetServicesResponse() + client.batch_get_services(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchGetServicesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ServiceUsageAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_enable_service_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.enable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.EnableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_disable_service_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.disable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.DisableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_service_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Service( + name='name_value', + parent='parent_value', + state=resources.State.DISABLED, + )) + await client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_services_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListServicesResponse( + next_page_token='next_page_token_value', + )) + await client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_enable_services_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.batch_enable_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchEnableServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_services_empty_call_grpc_asyncio(): + client = ServiceUsageAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.BatchGetServicesResponse( + )) + await client.batch_get_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchGetServicesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ServiceUsageClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_enable_service_rest_bad_request(request_type=serviceusage.EnableServiceRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.enable_service(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.EnableServiceRequest, + dict, +]) +def test_enable_service_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.enable_service(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_service_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_enable_service") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_enable_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.EnableServiceRequest.pb(serviceusage.EnableServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = serviceusage.EnableServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_service(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_service_rest_bad_request(request_type=serviceusage.DisableServiceRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.disable_service(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.DisableServiceRequest, + dict, +]) +def test_disable_service_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.disable_service(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_service_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_disable_service") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_disable_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.DisableServiceRequest.pb(serviceusage.DisableServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = serviceusage.DisableServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_service_rest_bad_request(request_type=serviceusage.GetServiceRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_service(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.GetServiceRequest, + dict, +]) +def test_get_service_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/services/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.Service( + name='name_value', + parent='parent_value', + state=resources.State.DISABLED, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Service.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_service(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Service) + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.state == resources.State.DISABLED + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_service_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_get_service") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_get_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.GetServiceRequest.pb(serviceusage.GetServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = resources.Service.to_json(resources.Service()) + req.return_value.content = return_value + + request = serviceusage.GetServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Service() + + client.get_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_services_rest_bad_request(request_type=serviceusage.ListServicesRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_services(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.ListServicesRequest, + dict, +]) +def test_list_services_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = serviceusage.ListServicesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = serviceusage.ListServicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_services(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_services_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_list_services") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_list_services") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.ListServicesRequest.pb(serviceusage.ListServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = serviceusage.ListServicesResponse.to_json(serviceusage.ListServicesResponse()) + req.return_value.content = return_value + + request = serviceusage.ListServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = serviceusage.ListServicesResponse() + + client.list_services(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_enable_services_rest_bad_request(request_type=serviceusage.BatchEnableServicesRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_enable_services(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.BatchEnableServicesRequest, + dict, +]) +def test_batch_enable_services_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_enable_services(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_enable_services_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_batch_enable_services") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_batch_enable_services") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.BatchEnableServicesRequest.pb(serviceusage.BatchEnableServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = serviceusage.BatchEnableServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_enable_services(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_get_services_rest_bad_request(request_type=serviceusage.BatchGetServicesRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_get_services(request) + + +@pytest.mark.parametrize("request_type", [ + serviceusage.BatchGetServicesRequest, + dict, +]) +def test_batch_get_services_rest_call_success(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = serviceusage.BatchGetServicesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = serviceusage.BatchGetServicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_get_services(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, serviceusage.BatchGetServicesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_services_rest_interceptors(null_interceptor): + transport = transports.ServiceUsageRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceUsageRestInterceptor(), + ) + client = ServiceUsageClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "post_batch_get_services") as post, \ + mock.patch.object(transports.ServiceUsageRestInterceptor, "pre_batch_get_services") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serviceusage.BatchGetServicesRequest.pb(serviceusage.BatchGetServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = serviceusage.BatchGetServicesResponse.to_json(serviceusage.BatchGetServicesResponse()) + req.return_value.content = return_value + + request = serviceusage.BatchGetServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = serviceusage.BatchGetServicesResponse() + + client.batch_get_services(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'operations/sample1'}, request) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'operations/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_enable_service_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.enable_service), + '__call__') as call: + client.enable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.EnableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_disable_service_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.disable_service), + '__call__') as call: + client.disable_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.DisableServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_service_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_service), + '__call__') as call: + client.get_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.GetServiceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_services_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_services), + '__call__') as call: + client.list_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.ListServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_enable_services_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_enable_services), + '__call__') as call: + client.batch_enable_services(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchEnableServicesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_services_empty_call_rest(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_services), + '__call__') as call: + client.batch_get_services(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = serviceusage.BatchGetServicesRequest() + + assert args[0] == request_msg + + +def test_service_usage_rest_lro_client(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceUsageGrpcTransport, + ) + +def test_service_usage_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceUsageTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_usage_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.service_usage_v1.services.service_usage.transports.ServiceUsageTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceUsageTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'enable_service', + 'disable_service', + 'get_service', + 'list_services', + 'batch_enable_services', + 'batch_get_services', + 'get_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_usage_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.service_usage_v1.services.service_usage.transports.ServiceUsageTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceUsageTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', +), + quota_project_id="octopus", + ) + + +def test_service_usage_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.service_usage_v1.services.service_usage.transports.ServiceUsageTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceUsageTransport() + adc.assert_called_once() + + +def test_service_usage_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceUsageClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceUsageGrpcTransport, + transports.ServiceUsageGrpcAsyncIOTransport, + ], +) +def test_service_usage_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/service.management',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceUsageGrpcTransport, + transports.ServiceUsageGrpcAsyncIOTransport, + transports.ServiceUsageRestTransport, + ], +) +def test_service_usage_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceUsageGrpcTransport, grpc_helpers), + (transports.ServiceUsageGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_service_usage_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "serviceusage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/service.management', +), + scopes=["1", "2"], + default_host="serviceusage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport]) +def test_service_usage_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_service_usage_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ServiceUsageRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_usage_host_no_port(transport_name): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='serviceusage.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'serviceusage.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://serviceusage.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_service_usage_host_with_port(transport_name): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='serviceusage.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'serviceusage.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://serviceusage.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_service_usage_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceUsageClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceUsageClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.enable_service._session + session2 = client2.transport.enable_service._session + assert session1 != session2 + session1 = client1.transport.disable_service._session + session2 = client2.transport.disable_service._session + assert session1 != session2 + session1 = client1.transport.get_service._session + session2 = client2.transport.get_service._session + assert session1 != session2 + session1 = client1.transport.list_services._session + session2 = client2.transport.list_services._session + assert session1 != session2 + session1 = client1.transport.batch_enable_services._session + session2 = client2.transport.batch_enable_services._session + assert session1 != session2 + session1 = client1.transport.batch_get_services._session + session2 = client2.transport.batch_get_services._session + assert session1 != session2 +def test_service_usage_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceUsageGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_usage_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ServiceUsageGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport]) +def test_service_usage_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport]) +def test_service_usage_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_service_usage_grpc_lro_client(): + client = ServiceUsageClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_service_usage_grpc_lro_async_client(): + client = ServiceUsageAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_service_path(): + project = "squid" + service = "clam" + expected = "projects/{project}/services/{service}".format(project=project, service=service, ) + actual = ServiceUsageClient.service_path(project, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "whelk", + "service": "octopus", + } + path = ServiceUsageClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceUsageClient.parse_service_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceUsageClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ServiceUsageClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceUsageClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceUsageClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ServiceUsageClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceUsageClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceUsageClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ServiceUsageClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceUsageClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = ServiceUsageClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ServiceUsageClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceUsageClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceUsageClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ServiceUsageClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
    # NOTE(review): tail of a path-helper round-trip test whose ``def`` line is
    # above this excerpt — presumably asserting that ``parse_common_location_path``
    # inverts the corresponding formatter; left untouched.
    actual = ServiceUsageClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """Both client construction paths forward ``client_info`` to the
    transport's ``_prep_wrapped_messages`` hook exactly once."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.ServiceUsageTransport, '_prep_wrapped_messages') as prep:
        client = ServiceUsageClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.ServiceUsageTransport, '_prep_wrapped_messages') as prep:
        transport_class = ServiceUsageClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_get_operation(transport: str = "grpc"):
    """``get_operation`` forwards the request to the gRPC stub and returns
    the stub's ``Operation`` response."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.GetOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation()
        response = client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
    """Async variant of ``test_get_operation``: the awaited call resolves to
    the ``Operation`` wrapped in the fake unary-unary call."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.GetOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)

def test_get_operation_field_headers():
    """``get_operation`` emits an ``x-goog-request-params`` routing header
    derived from ``request.name``."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()

        client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
    """Async variant of ``test_get_operation_field_headers``."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        await client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]

def test_get_operation_from_dict():
    """``get_operation`` also accepts a plain-dict request."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation()

        response = client.get_operation(
            request={
                "name": "locations",
            }
        )
        call.assert_called()
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
    """Async variant of ``test_get_operation_from_dict``."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


def test_list_operations(transport: str = "grpc"):
    """``list_operations`` forwards the request to the gRPC stub and returns
    the stub's ``ListOperationsResponse``."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.ListOperationsRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.ListOperationsResponse()
        response = client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.ListOperationsResponse)
@pytest.mark.asyncio
async def test_list_operations_async(transport: str = "grpc_asyncio"):
    """Async variant of ``test_list_operations``."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.ListOperationsRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        response = await client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.ListOperationsResponse)

def test_list_operations_field_headers():
    """``list_operations`` emits an ``x-goog-request-params`` routing header
    derived from ``request.name``."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.ListOperationsRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        call.return_value = operations_pb2.ListOperationsResponse()

        client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
    """Async variant of ``test_list_operations_field_headers``."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.ListOperationsRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        await client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]

def test_list_operations_from_dict():
    """``list_operations`` also accepts a plain-dict request."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.ListOperationsResponse()

        response = client.list_operations(
            request={
                "name": "locations",
            }
        )
        call.assert_called()
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
    """Async variant of ``test_list_operations_from_dict``."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        response = await client.list_operations(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


def test_transport_close_grpc():
    """Exiting the sync client context manager closes the gRPC channel."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc"
    )
    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
        with client:
            close.assert_not_called()
        close.assert_called_once()


@pytest.mark.asyncio
async def test_transport_close_grpc_asyncio():
    """Exiting the async client context manager closes the gRPC channel."""
    client = ServiceUsageAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio"
    )
    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()


def test_transport_close_rest():
    """Exiting the client context manager closes the REST session."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
        with client:
            close.assert_not_called()
        close.assert_called_once()


def test_client_ctx():
    """The client context manager delegates ``close`` to its transport for
    every supported transport name."""
    transports = [
        'rest',
        'grpc',
    ]
    for transport in transports:
        client = ServiceUsageClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An ``api_key`` in client options is exchanged for API-key credentials
    and passed to the transport constructor with the default settings."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )