From f666d8afbe7565595289888fae8622cf95f28b4a Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 18 May 2023 08:48:12 +0000 Subject: [PATCH 1/2] feat: add Oracle to PostgreSQL migration APIs PiperOrigin-RevId: 533037268 Source-Link: https://github.com/googleapis/googleapis/commit/d45c9a2409c9e362d163d899f8479cd92959f93e Source-Link: https://github.com/googleapis/googleapis-gen/commit/40c33043e3d79a0dd38ec29dabde66490dcce809 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDBjMzMwNDNlM2Q3OWEwZGQzOGVjMjlkYWJkZTY2NDkwZGNjZTgwOSJ9 --- owl-bot-staging/v1/.coveragerc | 13 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + .../clouddms_v1/data_migration_service.rst | 10 + .../v1/docs/clouddms_v1/services.rst | 6 + owl-bot-staging/v1/docs/clouddms_v1/types.rst | 6 + owl-bot-staging/v1/docs/conf.py | 376 + owl-bot-staging/v1/docs/index.rst | 7 + .../v1/google/cloud/clouddms/__init__.py | 219 + .../v1/google/cloud/clouddms/gapic_version.py | 16 + .../v1/google/cloud/clouddms/py.typed | 2 + .../v1/google/cloud/clouddms_v1/__init__.py | 220 + .../cloud/clouddms_v1/gapic_metadata.json | 383 + .../google/cloud/clouddms_v1/gapic_version.py | 16 + .../v1/google/cloud/clouddms_v1/py.typed | 2 + .../cloud/clouddms_v1/services/__init__.py | 15 + .../data_migration_service/__init__.py | 22 + .../data_migration_service/async_client.py | 4804 +++++++ .../services/data_migration_service/client.py | 5053 +++++++ .../services/data_migration_service/pagers.py | 746 ++ .../transports/__init__.py | 33 + .../data_migration_service/transports/base.py | 733 ++ .../data_migration_service/transports/grpc.py | 1430 ++ .../transports/grpc_asyncio.py | 1429 ++ .../cloud/clouddms_v1/types/__init__.py | 216 + .../cloud/clouddms_v1/types/clouddms.py | 1718 +++ .../clouddms_v1/types/clouddms_resources.py | 2025 +++ .../types/conversionworkspace_resources.py | 1221 ++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 
+ ...ervice_apply_conversion_workspace_async.py | 57 + ...service_apply_conversion_workspace_sync.py | 57 + ...rvice_commit_conversion_workspace_async.py | 56 + ...ervice_commit_conversion_workspace_sync.py | 56 + ...vice_convert_conversion_workspace_async.py | 55 + ...rvice_convert_conversion_workspace_sync.py | 55 + ...service_create_connection_profile_async.py | 64 + ..._service_create_connection_profile_sync.py | 64 + ...rvice_create_conversion_workspace_async.py | 64 + ...ervice_create_conversion_workspace_sync.py | 64 + ...tion_service_create_migration_job_async.py | 65 + ...ation_service_create_migration_job_sync.py | 65 + ...service_create_private_connection_async.py | 62 + ..._service_create_private_connection_sync.py | 62 + ...service_delete_connection_profile_async.py | 56 + ..._service_delete_connection_profile_sync.py | 56 + ...rvice_delete_conversion_workspace_async.py | 56 + ...ervice_delete_conversion_workspace_sync.py | 56 + ...tion_service_delete_migration_job_async.py | 56 + ...ation_service_delete_migration_job_sync.py | 56 + ...service_delete_private_connection_async.py | 56 + ..._service_delete_private_connection_sync.py | 56 + ...be_conversion_workspace_revisions_async.py | 52 + ...ibe_conversion_workspace_revisions_sync.py | 52 + ...ervice_describe_database_entities_async.py | 53 + ...service_describe_database_entities_sync.py | 53 + ...igration_service_fetch_static_ips_async.py | 53 + ...migration_service_fetch_static_ips_sync.py | 53 + ...ation_service_generate_ssh_script_async.py | 56 + ...ration_service_generate_ssh_script_sync.py | 56 + ...on_service_get_connection_profile_async.py | 52 + ...ion_service_get_connection_profile_sync.py | 52 + ..._service_get_conversion_workspace_async.py | 52 + ...n_service_get_conversion_workspace_sync.py | 52 + ...gration_service_get_migration_job_async.py | 52 + ...igration_service_get_migration_job_sync.py | 52 + ...on_service_get_private_connection_async.py | 52 + 
...ion_service_get_private_connection_sync.py | 52 + ...tion_service_import_mapping_rules_async.py | 56 + ...ation_service_import_mapping_rules_sync.py | 56 + ..._service_list_connection_profiles_async.py | 53 + ...n_service_list_connection_profiles_sync.py | 53 + ...ervice_list_conversion_workspaces_async.py | 53 + ...service_list_conversion_workspaces_sync.py | 53 + ...ation_service_list_migration_jobs_async.py | 53 + ...ration_service_list_migration_jobs_sync.py | 53 + ..._service_list_private_connections_async.py | 53 + ...n_service_list_private_connections_sync.py | 53 + ...ion_service_promote_migration_job_async.py | 55 + ...tion_service_promote_migration_job_sync.py | 55 + ...ion_service_restart_migration_job_async.py | 55 + ...tion_service_restart_migration_job_sync.py | 55 + ...tion_service_resume_migration_job_async.py | 55 + ...ation_service_resume_migration_job_sync.py | 55 + ...ice_rollback_conversion_workspace_async.py | 56 + ...vice_rollback_conversion_workspace_sync.py | 56 + ...on_service_search_background_jobs_async.py | 52 + ...ion_service_search_background_jobs_sync.py | 52 + ...service_seed_conversion_workspace_async.py | 56 + ..._service_seed_conversion_workspace_sync.py | 56 + ...ation_service_start_migration_job_async.py | 55 + ...ration_service_start_migration_job_sync.py | 55 + ...ration_service_stop_migration_job_async.py | 55 + ...gration_service_stop_migration_job_sync.py | 55 + ...service_update_connection_profile_async.py | 62 + ..._service_update_connection_profile_sync.py | 62 + ...rvice_update_conversion_workspace_async.py | 62 + ...ervice_update_conversion_workspace_sync.py | 62 + ...tion_service_update_migration_job_async.py | 63 + ...ation_service_update_migration_job_sync.py | 63 + ...tion_service_verify_migration_job_async.py | 55 + ...ation_service_verify_migration_job_sync.py | 55 + ...pet_metadata_google.cloud.clouddms.v1.json | 5771 ++++++++ .../v1/scripts/fixup_clouddms_v1_keywords.py | 211 + owl-bot-staging/v1/setup.py | 
91 + .../v1/testing/constraints-3.10.txt | 7 + .../v1/testing/constraints-3.11.txt | 7 + .../v1/testing/constraints-3.12.txt | 7 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 7 + .../v1/testing/constraints-3.9.txt | 7 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/clouddms_v1/__init__.py | 16 + .../test_data_migration_service.py | 10874 ++++++++++++++++ 117 files changed, 42084 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst create mode 100644 owl-bot-staging/v1/docs/clouddms_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/clouddms_v1/types.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/google/cloud/clouddms/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py create 
mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py create mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json create mode 100644 owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 0000000..437b0aa --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/clouddms/__init__.py + google/cloud/clouddms/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 0000000..29227d4 --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 0000000..b318e50 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/clouddms *.py +recursive-include google/cloud/clouddms_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 0000000..94ac6f5 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Clouddms API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Clouddms API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. 
The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst b/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst new file mode 100644 index 0000000..86f0b88 --- /dev/null +++ b/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst @@ -0,0 +1,10 @@ +DataMigrationService +-------------------------------------- + +.. automodule:: google.cloud.clouddms_v1.services.data_migration_service + :members: + :inherited-members: + +.. automodule:: google.cloud.clouddms_v1.services.data_migration_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/clouddms_v1/services.rst b/owl-bot-staging/v1/docs/clouddms_v1/services.rst new file mode 100644 index 0000000..89359f2 --- /dev/null +++ b/owl-bot-staging/v1/docs/clouddms_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Clouddms v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + data_migration_service diff --git a/owl-bot-staging/v1/docs/clouddms_v1/types.rst b/owl-bot-staging/v1/docs/clouddms_v1/types.rst new file mode 100644 index 0000000..26b87db --- /dev/null +++ b/owl-bot-staging/v1/docs/clouddms_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Clouddms v1 API +====================================== + +.. 
automodule:: google.cloud.clouddms_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 0000000..9bc1929 --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dms documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dms" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dms-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dms.tex", + u"google-cloud-dms Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dms", + u"Google Cloud Clouddms Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dms", + u"google-cloud-dms Documentation", + author, + "google-cloud-dms", + "GAPIC library for Google Cloud Clouddms API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 0000000..83f0cba --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + clouddms_v1/services + clouddms_v1/types diff --git a/owl-bot-staging/v1/google/cloud/clouddms/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms/__init__.py new file mode 100644 index 0000000..0850ec8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms/__init__.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.clouddms import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.clouddms_v1.services.data_migration_service.client import DataMigrationServiceClient +from google.cloud.clouddms_v1.services.data_migration_service.async_client import DataMigrationServiceAsyncClient + +from google.cloud.clouddms_v1.types.clouddms import ApplyConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CommitConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import ConvertConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CreateConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import CreateConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import CreateMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import CreatePrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import DeleteMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import DeletePrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsRequest +from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsResponse +from google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesRequest +from 
google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesResponse +from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsRequest +from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsResponse +from google.cloud.clouddms_v1.types.clouddms import GenerateSshScriptRequest +from google.cloud.clouddms_v1.types.clouddms import GetConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import GetConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import GetMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import GetPrivateConnectionRequest +from google.cloud.clouddms_v1.types.clouddms import ImportMappingRulesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesResponse +from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesRequest +from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesResponse +from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsRequest +from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsResponse +from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsRequest +from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsResponse +from google.cloud.clouddms_v1.types.clouddms import OperationMetadata +from google.cloud.clouddms_v1.types.clouddms import PromoteMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import RestartMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import ResumeMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import RollbackConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsRequest +from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsResponse +from google.cloud.clouddms_v1.types.clouddms import 
SeedConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import SshScript +from google.cloud.clouddms_v1.types.clouddms import StartMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import StopMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import UpdateConnectionProfileRequest +from google.cloud.clouddms_v1.types.clouddms import UpdateConversionWorkspaceRequest +from google.cloud.clouddms_v1.types.clouddms import UpdateMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import VerifyMigrationJobRequest +from google.cloud.clouddms_v1.types.clouddms import VmCreationConfig +from google.cloud.clouddms_v1.types.clouddms import VmSelectionConfig +from google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbSettings +from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlSettings +from google.cloud.clouddms_v1.types.clouddms_resources import ConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import ConversionWorkspaceInfo +from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseType +from google.cloud.clouddms_v1.types.clouddms_resources import ForwardSshTunnelConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJob +from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJobVerificationError +from google.cloud.clouddms_v1.types.clouddms_resources import MySqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import OracleConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import PostgreSqlConnectionProfile +from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnection +from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnectivity +from 
google.cloud.clouddms_v1.types.clouddms_resources import PrivateServiceConnectConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import ReverseSshConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import SqlAclEntry +from google.cloud.clouddms_v1.types.clouddms_resources import SqlIpConfig +from google.cloud.clouddms_v1.types.clouddms_resources import SslConfig +from google.cloud.clouddms_v1.types.clouddms_resources import StaticIpConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import StaticServiceIpConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConfig +from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConnectivity +from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseEngine +from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseProvider +from google.cloud.clouddms_v1.types.clouddms_resources import NetworkArchitecture +from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobLogEntry +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ColumnEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConstraintEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConversionWorkspace +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEngineInfo +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMapping +from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMappingLogEntry +from google.cloud.clouddms_v1.types.conversionworkspace_resources import FunctionEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import IndexEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import PackageEntity +from 
google.cloud.clouddms_v1.types.conversionworkspace_resources import SchemaEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SequenceEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import StoredProcedureEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import SynonymEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import TableEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import TriggerEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ViewEntity +from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobType +from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntityType +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ImportRulesFileFormat + +__all__ = ('DataMigrationServiceClient', + 'DataMigrationServiceAsyncClient', + 'ApplyConversionWorkspaceRequest', + 'CommitConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'CreateConnectionProfileRequest', + 'CreateConversionWorkspaceRequest', + 'CreateMigrationJobRequest', + 'CreatePrivateConnectionRequest', + 'DeleteConnectionProfileRequest', + 'DeleteConversionWorkspaceRequest', + 'DeleteMigrationJobRequest', + 'DeletePrivateConnectionRequest', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + 'GenerateSshScriptRequest', + 'GetConnectionProfileRequest', + 'GetConversionWorkspaceRequest', + 'GetMigrationJobRequest', + 'GetPrivateConnectionRequest', + 'ImportMappingRulesRequest', + 'ListConnectionProfilesRequest', + 'ListConnectionProfilesResponse', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 
'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'OperationMetadata', + 'PromoteMigrationJobRequest', + 'RestartMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'RollbackConversionWorkspaceRequest', + 'SearchBackgroundJobsRequest', + 'SearchBackgroundJobsResponse', + 'SeedConversionWorkspaceRequest', + 'SshScript', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'UpdateConnectionProfileRequest', + 'UpdateConversionWorkspaceRequest', + 'UpdateMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'AlloyDbConnectionProfile', + 'AlloyDbSettings', + 'CloudSqlConnectionProfile', + 'CloudSqlSettings', + 'ConnectionProfile', + 'ConversionWorkspaceInfo', + 'DatabaseType', + 'ForwardSshTunnelConnectivity', + 'MigrationJob', + 'MigrationJobVerificationError', + 'MySqlConnectionProfile', + 'OracleConnectionProfile', + 'PostgreSqlConnectionProfile', + 'PrivateConnection', + 'PrivateConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'SqlAclEntry', + 'SqlIpConfig', + 'SslConfig', + 'StaticIpConnectivity', + 'StaticServiceIpConnectivity', + 'VpcPeeringConfig', + 'VpcPeeringConnectivity', + 'DatabaseEngine', + 'DatabaseProvider', + 'NetworkArchitecture', + 'BackgroundJobLogEntry', + 'ColumnEntity', + 'ConstraintEntity', + 'ConversionWorkspace', + 'DatabaseEngineInfo', + 'DatabaseEntity', + 'EntityMapping', + 'EntityMappingLogEntry', + 'FunctionEntity', + 'IndexEntity', + 'PackageEntity', + 'SchemaEntity', + 'SequenceEntity', + 'StoredProcedureEntity', + 'SynonymEntity', + 'TableEntity', + 'TriggerEntity', + 'ViewEntity', + 'BackgroundJobType', + 'DatabaseEntityType', + 'ImportRulesFileFormat', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py b/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py @@ -0,0 +1,16 @@ +# 
-*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/clouddms/py.typed b/owl-bot-staging/v1/google/cloud/clouddms/py.typed new file mode 100644 index 0000000..d368a62 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py new file mode 100644 index 0000000..0656b9b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.clouddms_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_migration_service import DataMigrationServiceClient +from .services.data_migration_service import DataMigrationServiceAsyncClient + +from .types.clouddms import ApplyConversionWorkspaceRequest +from .types.clouddms import CommitConversionWorkspaceRequest +from .types.clouddms import ConvertConversionWorkspaceRequest +from .types.clouddms import CreateConnectionProfileRequest +from .types.clouddms import CreateConversionWorkspaceRequest +from .types.clouddms import CreateMigrationJobRequest +from .types.clouddms import CreatePrivateConnectionRequest +from .types.clouddms import DeleteConnectionProfileRequest +from .types.clouddms import DeleteConversionWorkspaceRequest +from .types.clouddms import DeleteMigrationJobRequest +from .types.clouddms import DeletePrivateConnectionRequest +from .types.clouddms import DescribeConversionWorkspaceRevisionsRequest +from .types.clouddms import DescribeConversionWorkspaceRevisionsResponse +from .types.clouddms import DescribeDatabaseEntitiesRequest +from .types.clouddms import DescribeDatabaseEntitiesResponse +from .types.clouddms import FetchStaticIpsRequest +from .types.clouddms import FetchStaticIpsResponse +from .types.clouddms import GenerateSshScriptRequest +from .types.clouddms import GetConnectionProfileRequest +from .types.clouddms import GetConversionWorkspaceRequest +from .types.clouddms import GetMigrationJobRequest +from .types.clouddms import GetPrivateConnectionRequest +from .types.clouddms import ImportMappingRulesRequest +from .types.clouddms import ListConnectionProfilesRequest +from .types.clouddms import ListConnectionProfilesResponse +from .types.clouddms import ListConversionWorkspacesRequest +from .types.clouddms import ListConversionWorkspacesResponse +from .types.clouddms import ListMigrationJobsRequest +from .types.clouddms import ListMigrationJobsResponse +from 
.types.clouddms import ListPrivateConnectionsRequest +from .types.clouddms import ListPrivateConnectionsResponse +from .types.clouddms import OperationMetadata +from .types.clouddms import PromoteMigrationJobRequest +from .types.clouddms import RestartMigrationJobRequest +from .types.clouddms import ResumeMigrationJobRequest +from .types.clouddms import RollbackConversionWorkspaceRequest +from .types.clouddms import SearchBackgroundJobsRequest +from .types.clouddms import SearchBackgroundJobsResponse +from .types.clouddms import SeedConversionWorkspaceRequest +from .types.clouddms import SshScript +from .types.clouddms import StartMigrationJobRequest +from .types.clouddms import StopMigrationJobRequest +from .types.clouddms import UpdateConnectionProfileRequest +from .types.clouddms import UpdateConversionWorkspaceRequest +from .types.clouddms import UpdateMigrationJobRequest +from .types.clouddms import VerifyMigrationJobRequest +from .types.clouddms import VmCreationConfig +from .types.clouddms import VmSelectionConfig +from .types.clouddms_resources import AlloyDbConnectionProfile +from .types.clouddms_resources import AlloyDbSettings +from .types.clouddms_resources import CloudSqlConnectionProfile +from .types.clouddms_resources import CloudSqlSettings +from .types.clouddms_resources import ConnectionProfile +from .types.clouddms_resources import ConversionWorkspaceInfo +from .types.clouddms_resources import DatabaseType +from .types.clouddms_resources import ForwardSshTunnelConnectivity +from .types.clouddms_resources import MigrationJob +from .types.clouddms_resources import MigrationJobVerificationError +from .types.clouddms_resources import MySqlConnectionProfile +from .types.clouddms_resources import OracleConnectionProfile +from .types.clouddms_resources import PostgreSqlConnectionProfile +from .types.clouddms_resources import PrivateConnection +from .types.clouddms_resources import PrivateConnectivity +from .types.clouddms_resources import 
PrivateServiceConnectConnectivity +from .types.clouddms_resources import ReverseSshConnectivity +from .types.clouddms_resources import SqlAclEntry +from .types.clouddms_resources import SqlIpConfig +from .types.clouddms_resources import SslConfig +from .types.clouddms_resources import StaticIpConnectivity +from .types.clouddms_resources import StaticServiceIpConnectivity +from .types.clouddms_resources import VpcPeeringConfig +from .types.clouddms_resources import VpcPeeringConnectivity +from .types.clouddms_resources import DatabaseEngine +from .types.clouddms_resources import DatabaseProvider +from .types.clouddms_resources import NetworkArchitecture +from .types.conversionworkspace_resources import BackgroundJobLogEntry +from .types.conversionworkspace_resources import ColumnEntity +from .types.conversionworkspace_resources import ConstraintEntity +from .types.conversionworkspace_resources import ConversionWorkspace +from .types.conversionworkspace_resources import DatabaseEngineInfo +from .types.conversionworkspace_resources import DatabaseEntity +from .types.conversionworkspace_resources import EntityMapping +from .types.conversionworkspace_resources import EntityMappingLogEntry +from .types.conversionworkspace_resources import FunctionEntity +from .types.conversionworkspace_resources import IndexEntity +from .types.conversionworkspace_resources import PackageEntity +from .types.conversionworkspace_resources import SchemaEntity +from .types.conversionworkspace_resources import SequenceEntity +from .types.conversionworkspace_resources import StoredProcedureEntity +from .types.conversionworkspace_resources import SynonymEntity +from .types.conversionworkspace_resources import TableEntity +from .types.conversionworkspace_resources import TriggerEntity +from .types.conversionworkspace_resources import ViewEntity +from .types.conversionworkspace_resources import BackgroundJobType +from .types.conversionworkspace_resources import DatabaseEntityType +from 
.types.conversionworkspace_resources import ImportRulesFileFormat + +__all__ = ( + 'DataMigrationServiceAsyncClient', +'AlloyDbConnectionProfile', +'AlloyDbSettings', +'ApplyConversionWorkspaceRequest', +'BackgroundJobLogEntry', +'BackgroundJobType', +'CloudSqlConnectionProfile', +'CloudSqlSettings', +'ColumnEntity', +'CommitConversionWorkspaceRequest', +'ConnectionProfile', +'ConstraintEntity', +'ConversionWorkspace', +'ConversionWorkspaceInfo', +'ConvertConversionWorkspaceRequest', +'CreateConnectionProfileRequest', +'CreateConversionWorkspaceRequest', +'CreateMigrationJobRequest', +'CreatePrivateConnectionRequest', +'DataMigrationServiceClient', +'DatabaseEngine', +'DatabaseEngineInfo', +'DatabaseEntity', +'DatabaseEntityType', +'DatabaseProvider', +'DatabaseType', +'DeleteConnectionProfileRequest', +'DeleteConversionWorkspaceRequest', +'DeleteMigrationJobRequest', +'DeletePrivateConnectionRequest', +'DescribeConversionWorkspaceRevisionsRequest', +'DescribeConversionWorkspaceRevisionsResponse', +'DescribeDatabaseEntitiesRequest', +'DescribeDatabaseEntitiesResponse', +'EntityMapping', +'EntityMappingLogEntry', +'FetchStaticIpsRequest', +'FetchStaticIpsResponse', +'ForwardSshTunnelConnectivity', +'FunctionEntity', +'GenerateSshScriptRequest', +'GetConnectionProfileRequest', +'GetConversionWorkspaceRequest', +'GetMigrationJobRequest', +'GetPrivateConnectionRequest', +'ImportMappingRulesRequest', +'ImportRulesFileFormat', +'IndexEntity', +'ListConnectionProfilesRequest', +'ListConnectionProfilesResponse', +'ListConversionWorkspacesRequest', +'ListConversionWorkspacesResponse', +'ListMigrationJobsRequest', +'ListMigrationJobsResponse', +'ListPrivateConnectionsRequest', +'ListPrivateConnectionsResponse', +'MigrationJob', +'MigrationJobVerificationError', +'MySqlConnectionProfile', +'NetworkArchitecture', +'OperationMetadata', +'OracleConnectionProfile', +'PackageEntity', +'PostgreSqlConnectionProfile', +'PrivateConnection', +'PrivateConnectivity', 
+'PrivateServiceConnectConnectivity', +'PromoteMigrationJobRequest', +'RestartMigrationJobRequest', +'ResumeMigrationJobRequest', +'ReverseSshConnectivity', +'RollbackConversionWorkspaceRequest', +'SchemaEntity', +'SearchBackgroundJobsRequest', +'SearchBackgroundJobsResponse', +'SeedConversionWorkspaceRequest', +'SequenceEntity', +'SqlAclEntry', +'SqlIpConfig', +'SshScript', +'SslConfig', +'StartMigrationJobRequest', +'StaticIpConnectivity', +'StaticServiceIpConnectivity', +'StopMigrationJobRequest', +'StoredProcedureEntity', +'SynonymEntity', +'TableEntity', +'TriggerEntity', +'UpdateConnectionProfileRequest', +'UpdateConversionWorkspaceRequest', +'UpdateMigrationJobRequest', +'VerifyMigrationJobRequest', +'ViewEntity', +'VmCreationConfig', +'VmSelectionConfig', +'VpcPeeringConfig', +'VpcPeeringConnectivity', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json new file mode 100644 index 0000000..a1d6f43 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json @@ -0,0 +1,383 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.clouddms_v1", + "protoPackage": "google.cloud.clouddms.v1", + "schema": "1.0", + "services": { + "DataMigrationService": { + "clients": { + "grpc": { + "libraryClient": "DataMigrationServiceClient", + "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, + "CreateMigrationJob": { + "methods": [ + "create_migration_job" + ] + }, 
+ "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, + "DeleteMigrationJob": { + "methods": [ + "delete_migration_job" + ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GenerateSshScript": { + "methods": [ + "generate_ssh_script" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, + "GetMigrationJob": { + "methods": [ + "get_migration_job" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, + "ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, + "ListMigrationJobs": { + "methods": [ + "list_migration_jobs" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "PromoteMigrationJob": { + "methods": [ + "promote_migration_job" + ] + }, + "RestartMigrationJob": { + "methods": [ + "restart_migration_job" + ] + }, + "ResumeMigrationJob": { + "methods": [ + "resume_migration_job" + ] + }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, + "StartMigrationJob": { + "methods": [ + 
"start_migration_job" + ] + }, + "StopMigrationJob": { + "methods": [ + "stop_migration_job" + ] + }, + "UpdateConnectionProfile": { + "methods": [ + "update_connection_profile" + ] + }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, + "UpdateMigrationJob": { + "methods": [ + "update_migration_job" + ] + }, + "VerifyMigrationJob": { + "methods": [ + "verify_migration_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataMigrationServiceAsyncClient", + "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, + "CreateMigrationJob": { + "methods": [ + "create_migration_job" + ] + }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, + "DeleteMigrationJob": { + "methods": [ + "delete_migration_job" + ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GenerateSshScript": { + "methods": [ + "generate_ssh_script" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, + "GetMigrationJob": { + "methods": [ + 
"get_migration_job" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, + "ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, + "ListMigrationJobs": { + "methods": [ + "list_migration_jobs" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "PromoteMigrationJob": { + "methods": [ + "promote_migration_job" + ] + }, + "RestartMigrationJob": { + "methods": [ + "restart_migration_job" + ] + }, + "ResumeMigrationJob": { + "methods": [ + "resume_migration_job" + ] + }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, + "StartMigrationJob": { + "methods": [ + "start_migration_job" + ] + }, + "StopMigrationJob": { + "methods": [ + "stop_migration_job" + ] + }, + "UpdateConnectionProfile": { + "methods": [ + "update_connection_profile" + ] + }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, + "UpdateMigrationJob": { + "methods": [ + "update_migration_job" + ] + }, + "VerifyMigrationJob": { + "methods": [ + "verify_migration_job" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed b/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed new file mode 100644 index 0000000..d368a62 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py new file mode 100644 index 0000000..e8e1c38 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py new file mode 100644 index 0000000..253bb20 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataMigrationServiceClient +from .async_client import DataMigrationServiceAsyncClient + +__all__ = ( + 'DataMigrationServiceClient', + 'DataMigrationServiceAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py new file mode 100644 index 0000000..31a1e73 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py @@ -0,0 +1,4804 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.clouddms_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # 
type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport +from .client import DataMigrationServiceClient + + +class DataMigrationServiceAsyncClient: + """Database Migration service""" + + _client: DataMigrationServiceClient + + DEFAULT_ENDPOINT = DataMigrationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT + + connection_profile_path = staticmethod(DataMigrationServiceClient.connection_profile_path) + parse_connection_profile_path = staticmethod(DataMigrationServiceClient.parse_connection_profile_path) + conversion_workspace_path = staticmethod(DataMigrationServiceClient.conversion_workspace_path) + parse_conversion_workspace_path = staticmethod(DataMigrationServiceClient.parse_conversion_workspace_path) + migration_job_path = staticmethod(DataMigrationServiceClient.migration_job_path) + parse_migration_job_path = staticmethod(DataMigrationServiceClient.parse_migration_job_path) + networks_path = staticmethod(DataMigrationServiceClient.networks_path) + parse_networks_path = staticmethod(DataMigrationServiceClient.parse_networks_path) + private_connection_path = staticmethod(DataMigrationServiceClient.private_connection_path) + parse_private_connection_path = staticmethod(DataMigrationServiceClient.parse_private_connection_path) + common_billing_account_path = staticmethod(DataMigrationServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataMigrationServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataMigrationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataMigrationServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataMigrationServiceClient.common_organization_path) + parse_common_organization_path = 
staticmethod(DataMigrationServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataMigrationServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataMigrationServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataMigrationServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataMigrationServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceAsyncClient: The constructed client. + """ + return DataMigrationServiceClient.from_service_account_info.__func__(DataMigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceAsyncClient: The constructed client. + """ + return DataMigrationServiceClient.from_service_account_file.__func__(DataMigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataMigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataMigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataMigrationServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(DataMigrationServiceClient).get_transport_class, type(DataMigrationServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DataMigrationServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataMigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DataMigrationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_migration_jobs(self, + request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationJobsAsyncPager: + r"""Lists migration jobs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]]): + The request object. Retrieves a list of all migration + jobs in a given project and location. + parent (:class:`str`): + Required. The parent which owns this + collection of migrationJobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager: + Response message for + 'ListMigrationJobs' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.ListMigrationJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_migration_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMigrationJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_migration_job(self, + request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.MigrationJob: + r"""Gets details of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]]): + The request object. Request message for 'GetMigrationJob' + request. + name (:class:`str`): + Required. Name of the migration job + resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MigrationJob: + Represents a Database Migration + Service migration job object. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.GetMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_migration_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_migration_job(self, + request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + migration_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new migration job in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]]): + The request object. Request message to create a new + Database Migration Service migration job + in the specified project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of migration jobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): + Required. Represents a `migration + job `__ + object. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_job_id (:class:`str`): + Required. The ID of the instance to + create. + + This corresponds to the ``migration_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_job, migration_job_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.CreateMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_job is not None: + request.migration_job = migration_job + if migration_job_id is not None: + request.migration_job_id = migration_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_migration_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_migration_job(self, + request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, + *, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request 
(Optional[Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]]): + The request object. Request message for + 'UpdateMigrationJob' request. + migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): + Required. The migration job + parameters to update. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([migration_job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.UpdateMigrationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
        if migration_job is not None:
            request.migration_job = migration_job
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        # NOTE(review): the routing header is derived from the nested resource
        # name (``migration_job.name``); it is the empty string if the caller
        # did not set it on the request.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("migration_job.name", request.migration_job.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def delete_migration_job(self,
            request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Deletes a single migration job.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_delete_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.DeleteMigrationJobRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]]):
                The request object. Request message for
                'DeleteMigrationJob' request.
            name (:class:`str`):
                Required. Name of the migration job
                resource to delete.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                   empty messages in your APIs. A typical example is to
                   use it as the request or the response type of an API
                   method. For instance:

                      service Foo {
                         rpc Bar(google.protobuf.Empty) returns
                         (google.protobuf.Empty);

                      }

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = clouddms.DeleteMigrationJobRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The LRO resolves to google.protobuf.Empty: deletion returns no
        # resource payload.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def start_migration_job(self,
            request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Start an already created migration job.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_start_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.StartMigrationJobRequest(
                )

                # Make the request
                operation = client.start_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]]):
                The request object. Request message for
                'StartMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        # No flattened fields for this RPC: ``request`` may be None, a dict,
        # or a proto message (see the signature annotation).
        request = clouddms.StartMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.start_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def stop_migration_job(self,
            request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Stops a running migration job.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_stop_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.StopMigrationJobRequest(
                )

                # Make the request
                operation = client.stop_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]]):
                The request object. Request message for
                'StopMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.StopMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.stop_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def resume_migration_job(self,
            request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Resume a migration job that is currently stopped and
        is resumable (was stopped during CDC phase).

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_resume_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.ResumeMigrationJobRequest(
                )

                # Make the request
                operation = client.resume_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]]):
                The request object. Request message for
                'ResumeMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.ResumeMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.resume_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def promote_migration_job(self,
            request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Promote a migration job, stopping replication to the
        destination and promoting the destination to be a
        standalone database.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_promote_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.PromoteMigrationJobRequest(
                )

                # Make the request
                operation = client.promote_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]]):
                The request object. Request message for
                'PromoteMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.PromoteMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.promote_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def verify_migration_job(self,
            request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Verify a migration job, making sure the destination
        can reach the source and that all configuration and
        prerequisites are met.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_verify_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.VerifyMigrationJobRequest(
                )

                # Make the request
                operation = client.verify_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]]):
                The request object. Request message for
                'VerifyMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.VerifyMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.verify_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def restart_migration_job(self,
            request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Restart a stopped or failed migration job, resetting
        the destination instance to its original state and
        starting the migration process from scratch.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_restart_migration_job():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.RestartMigrationJobRequest(
                )

                # Make the request
                operation = client.restart_migration_job(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]]):
                The request object. Request message for
                'RestartMigrationJob' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.MigrationJob`
                Represents a Database Migration Service migration job
                object.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.RestartMigrationJobRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.restart_migration_job,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.MigrationJob,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def generate_ssh_script(self,
            request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> clouddms.SshScript:
        r"""Generate an SSH configuration script to configure the
        reverse SSH connectivity.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_generate_ssh_script():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                vm_creation_config = clouddms_v1.VmCreationConfig()
                vm_creation_config.vm_machine_type = "vm_machine_type_value"

                request = clouddms_v1.GenerateSshScriptRequest(
                    vm_creation_config=vm_creation_config,
                    vm="vm_value",
                )

                # Make the request
                response = await client.generate_ssh_script(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]]):
                The request object. Request message for
                'GenerateSshScript' request.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.clouddms_v1.types.SshScript:
                Response message for
                'GenerateSshScript' request.

        """
        # Create or coerce a protobuf request object.
        request = clouddms.GenerateSshScriptRequest(request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.generate_ssh_script,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        # Unlike the job-lifecycle RPCs above, this RPC routes on the
        # ``migration_job`` field rather than ``name``.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("migration_job", request.migration_job),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def list_connection_profiles(self,
            request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListConnectionProfilesAsyncPager:
        r"""Retrieves a list of all connection profiles in a
        given project and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_list_connection_profiles():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.ListConnectionProfilesRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_connection_profiles(request=request)

                # Handle the response
                async for response in page_result:
                    print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]]):
                The request object. Request message for
                'ListConnectionProfiles' request.
            parent (:class:`str`):
                Required. The parent which owns this
                collection of connection profiles.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager:
                Response message for
                'ListConnectionProfiles' request.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = clouddms.ListConnectionProfilesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_connection_profiles,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListConnectionProfilesAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def get_connection_profile(self,
            request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> clouddms_resources.ConnectionProfile:
        r"""Gets details of a single connection profile.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_get_connection_profile():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                request = clouddms_v1.GetConnectionProfileRequest(
                    name="name_value",
                )

                # Make the request
                response = await client.get_connection_profile(request=request)

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]]):
                The request object. Request message for
                'GetConnectionProfile' request.
            name (:class:`str`):
                Required. Name of the connection
                profile resource to get.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.clouddms_v1.types.ConnectionProfile:
                A connection profile definition.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = clouddms.GetConnectionProfileRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_connection_profile,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def create_connection_profile(self,
            request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            connection_profile: Optional[clouddms_resources.ConnectionProfile] = None,
            connection_profile_id: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Creates a new connection profile in a given project
        and location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import clouddms_v1

            async def sample_create_connection_profile():
                # Create a client
                client = clouddms_v1.DataMigrationServiceAsyncClient()

                # Initialize request argument(s)
                connection_profile = clouddms_v1.ConnectionProfile()
                connection_profile.mysql.host = "host_value"
                connection_profile.mysql.port = 453
                connection_profile.mysql.username = "username_value"
                connection_profile.mysql.password = "password_value"

                request = clouddms_v1.CreateConnectionProfileRequest(
                    parent="parent_value",
                    connection_profile_id="connection_profile_id_value",
                    connection_profile=connection_profile,
                )

                # Make the request
                operation = client.create_connection_profile(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]]):
                The request object. Request message for
                'CreateConnectionProfile' request.
            parent (:class:`str`):
                Required. The parent which owns this
                collection of connection profiles.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`):
                Required. The create request body
                including the connection profile data

                This corresponds to the ``connection_profile`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            connection_profile_id (:class:`str`):
                Required. The connection profile
                identifier.

                This corresponds to the ``connection_profile_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.clouddms_v1.types.ConnectionProfile`
                A connection profile definition.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, connection_profile, connection_profile_id])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = clouddms.CreateConnectionProfileRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if connection_profile is not None:
            request.connection_profile = connection_profile
        if connection_profile_id is not None:
            request.connection_profile_id = connection_profile_id

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_connection_profile,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            clouddms_resources.ConnectionProfile,
            metadata_type=clouddms.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def update_connection_profile(self,
            request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None,
            *,
            connection_profile: Optional[clouddms_resources.ConnectionProfile] = None,
            update_mask: Optional[field_mask_pb2.FieldMask] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Update the configuration of a single connection
        profile.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]]): + The request object. Request message for + 'UpdateConnectionProfile' request. + connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`): + Required. The connection profile + parameters to update. + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection_profile, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.UpdateConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection_profile is not None: + request.connection_profile = connection_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_connection_profile, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection_profile.name", request.connection_profile.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_connection_profile(self, + request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]]): + The request object. Request message for + 'DeleteConnectionProfile' request. + name (:class:`str`): + Required. Name of the connection + profile resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.DeleteConnectionProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_connection_profile, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_private_connection(self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (:class:`str`): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (:class:`google.cloud.clouddms_v1.types.PrivateConnection`): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (:class:`str`): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.CreatePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_private_connection(self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]]): + The request object. Request message to get a private + connection resource. + name (:class:`str`): + Required. The name of the private + connection to get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.GetPrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_private_connections(self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsAsyncPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (:class:`str`): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.ListPrivateConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_private_connections, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPrivateConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_private_connection(self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]]): + The request object. Request message to delete a private + connection. + name (:class:`str`): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.DeletePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_conversion_workspace(self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.GetConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_conversion_workspaces(self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesAsyncPager: + r"""Lists conversion workspaces in a given project and + location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.ListConversionWorkspacesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_conversion_workspaces, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConversionWorkspacesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_conversion_workspace(self, + request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request 
(Optional[Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (:class:`str`): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.CreateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_conversion_workspace(self, + request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, + *, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'UpdateConversionWorkspace' request. 
+ conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.UpdateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace.name", request.conversion_workspace.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_conversion_workspace(self, + request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.DeleteConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def seed_conversion_workspace(self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seed_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def import_mapping_rules(self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_mapping_rules, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def convert_conversion_workspace(self, + request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.convert_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def commit_conversion_workspace(self, + request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rollback_conversion_workspace(self, + request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def apply_conversion_workspace(self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def describe_database_entities(self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesAsyncPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_database_entities, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.DescribeDatabaseEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_background_jobs(self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_background_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def describe_conversion_workspace_revisions(self, + request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_static_ips(self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsAsyncPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]]): + The request object. Request message for 'FetchStaticIps' + request. + name (:class:`str`): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = clouddms.FetchStaticIpsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_static_ips, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchStaticIpsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataMigrationServiceAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py new file mode 100644 index 0000000..6cae9f9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py @@ -0,0 +1,5053 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.clouddms_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataMigrationServiceGrpcTransport 
+from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport
+
+
+class DataMigrationServiceClientMeta(type):
+    """Metaclass for the DataMigrationService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DataMigrationServiceTransport]]
+    _transport_registry["grpc"] = DataMigrationServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DataMigrationServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DataMigrationServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataMigrationServiceClient(metaclass=DataMigrationServiceClientMeta):
+    """Database Migration service"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datamigration.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataMigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataMigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataMigrationServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def connection_profile_path(project: str,location: str,connection_profile: str,) -> str:
+        """Returns a fully-qualified connection_profile string."""
+        return "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, )
+
+    @staticmethod
+    def parse_connection_profile_path(path: str) -> Dict[str,str]:
+        """Parses a connection_profile path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connectionProfiles/(?P<connection_profile>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def conversion_workspace_path(project: str,location: str,conversion_workspace: str,) -> str:
+        """Returns a fully-qualified conversion_workspace string."""
+        return "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, )
+
+    @staticmethod
+    def parse_conversion_workspace_path(path: str) -> Dict[str,str]:
+        """Parses a conversion_workspace path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/conversionWorkspaces/(?P<conversion_workspace>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def migration_job_path(project: str,location: str,migration_job: str,) -> str:
+        """Returns a fully-qualified migration_job string."""
+        return "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, )
+
+    @staticmethod
+    def parse_migration_job_path(path: str) -> Dict[str,str]:
+        """Parses a migration_job path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/migrationJobs/(?P<migration_job>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def networks_path(project: str,network: str,) -> str:
+        """Returns a fully-qualified networks string."""
+        
return "projects/{project}/global/networks/{network}".format(project=project, network=network, )
+
+    @staticmethod
+    def parse_networks_path(path: str) -> Dict[str,str]:
+        """Parses a networks path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def private_connection_path(project: str,location: str,private_connection: str,) -> str:
+        """Returns a fully-qualified private_connection string."""
+        return "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, )
+
+    @staticmethod
+    def parse_private_connection_path(path: str) -> Dict[str,str]:
+        """Parses a private_connection path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/privateConnections/(?P<private_connection>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return 
"organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataMigrationServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataMigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataMigrationServiceTransport): + # transport is a DataMigrationServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_migration_jobs(self, + request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationJobsPager: + r"""Lists migration jobs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]): + The request object. Retrieves a list of all migration + jobs in a given project and location. + parent (str): + Required. The parent which owns this + collection of migrationJobs. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager: + Response message for + 'ListMigrationJobs' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListMigrationJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListMigrationJobsRequest): + request = clouddms.ListMigrationJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_migration_job(self, + request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.MigrationJob: + r"""Gets details of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]): + The request object. Request message for 'GetMigrationJob' + request. + name (str): + Required. Name of the migration job + resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.MigrationJob: + Represents a Database Migration + Service migration job object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetMigrationJobRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetMigrationJobRequest): + request = clouddms.GetMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_migration_job(self, + request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + migration_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new migration job in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import clouddms_v1
+
+            def sample_create_migration_job():
+                # Create a client
+                client = clouddms_v1.DataMigrationServiceClient()
+
+                # Initialize request argument(s)
+                migration_job = clouddms_v1.MigrationJob()
+                migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value"
+                migration_job.reverse_ssh_connectivity.vm_port = 775
+                migration_job.type_ = "CONTINUOUS"
+                migration_job.source = "source_value"
+                migration_job.destination = "destination_value"
+
+                request = clouddms_v1.CreateMigrationJobRequest(
+                    parent="parent_value",
+                    migration_job_id="migration_job_id_value",
+                    migration_job=migration_job,
+                )
+
+                # Make the request
+                operation = client.create_migration_job(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]):
+                The request object. Request message to create a new
+                Database Migration Service migration job
+                in the specified project and region.
+            parent (str):
+                Required. The parent which owns this
+                collection of migration jobs.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            migration_job (google.cloud.clouddms_v1.types.MigrationJob):
+                Required. Represents a `migration
+                job <https://cloud.google.com/database-migration/docs/reference/rest/v1/projects.locations.migrationJobs>`__
+                object.
+
+                This corresponds to the ``migration_job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            migration_job_id (str):
+                Required. The ID of the instance to
+                create.
+
+                This corresponds to the ``migration_job_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_job, migration_job_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreateMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreateMigrationJobRequest): + request = clouddms.CreateMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_job is not None: + request.migration_job = migration_job + if migration_job_id is not None: + request.migration_job_id = migration_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_migration_job(self, + request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, + *, + migration_job: Optional[clouddms_resources.MigrationJob] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]): + The request object. Request message for + 'UpdateMigrationJob' request. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. The migration job + parameters to update. + + This corresponds to the ``migration_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([migration_job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.UpdateMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.UpdateMigrationJobRequest): + request = clouddms.UpdateMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if migration_job is not None: + request.migration_job = migration_job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job.name", request.migration_job.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_migration_job(self, + request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]): + The request object. Request message for + 'DeleteMigrationJob' request. + name (str): + Required. Name of the migration job + resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeleteMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeleteMigrationJobRequest): + request = clouddms.DeleteMigrationJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_migration_job(self, + request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Start an already created migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]): + The request object. Request message for + 'StartMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.StartMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.StartMigrationJobRequest): + request = clouddms.StartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def stop_migration_job(self, + request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a running migration job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]): + The request object. Request message for + 'StopMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.StopMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.StopMigrationJobRequest): + request = clouddms.StopMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def resume_migration_job(self, + request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]): + The request object. Request message for + 'ResumeMigrationJob' request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ResumeMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ResumeMigrationJobRequest): + request = clouddms.ResumeMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def promote_migration_job(self, + request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]): + The request object. Request message for + 'PromoteMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.PromoteMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.PromoteMigrationJobRequest): + request = clouddms.PromoteMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.promote_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def verify_migration_job(self, + request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]): + The request object. Request message for + 'VerifyMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.VerifyMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.VerifyMigrationJobRequest): + request = clouddms.VerifyMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.verify_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def restart_migration_job(self, + request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]): + The request object. Request message for + 'RestartMigrationJob' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.MigrationJob` + Represents a Database Migration Service migration job + object. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.RestartMigrationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.RestartMigrationJobRequest): + request = clouddms.RestartMigrationJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restart_migration_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.MigrationJob, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_ssh_script(self, + request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SshScript: + r"""Generate a SSH configuration script to configure the + reverse SSH connectivity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = client.generate_ssh_script(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]): + The request object. 
Request message for + 'GenerateSshScript' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SshScript: + Response message for + 'GenerateSshScript' request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GenerateSshScriptRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GenerateSshScriptRequest): + request = clouddms.GenerateSshScriptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_ssh_script] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("migration_job", request.migration_job), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_connection_profiles(self, + request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionProfilesPager: + r"""Retrieves a list of all connection profiles in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]): + The request object. Request message for + 'ListConnectionProfiles' request. + parent (str): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager: + Response message for + 'ListConnectionProfiles' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListConnectionProfilesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListConnectionProfilesRequest): + request = clouddms.ListConnectionProfilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connection_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionProfilesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection_profile(self, + request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.ConnectionProfile: + r"""Gets details of a single connection profile. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]): + The request object. Request message for + 'GetConnectionProfile' request. + name (str): + Required. Name of the connection + profile resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConnectionProfile: + A connection profile definition. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetConnectionProfileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetConnectionProfileRequest): + request = clouddms.GetConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_connection_profile(self, + request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + connection_profile_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new connection profile in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]): + The request object. Request message for + 'CreateConnectionProfile' request. + parent (str): + Required. The parent which owns this + collection of connection profiles. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The create request body + including the connection profile data + + This corresponds to the ``connection_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_profile_id (str): + Required. The connection profile + identifier. 
+ + This corresponds to the ``connection_profile_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection_profile, connection_profile_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreateConnectionProfileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreateConnectionProfileRequest): + request = clouddms.CreateConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection_profile is not None: + request.connection_profile = connection_profile + if connection_profile_id is not None: + request.connection_profile_id = connection_profile_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_connection_profile(self, + request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None, + *, + connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update the configuration of a single connection + profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+        # - It may require specifying regional endpoints when creating the service
+        #   client as shown in:
+        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+        from google.cloud import clouddms_v1
+
+        def sample_update_connection_profile():
+            # Create a client
+            client = clouddms_v1.DataMigrationServiceClient()
+
+            # Initialize request argument(s)
+            connection_profile = clouddms_v1.ConnectionProfile()
+            connection_profile.mysql.host = "host_value"
+            connection_profile.mysql.port = 453
+            connection_profile.mysql.username = "username_value"
+            connection_profile.mysql.password = "password_value"
+
+            request = clouddms_v1.UpdateConnectionProfileRequest(
+                connection_profile=connection_profile,
+            )
+
+            # Make the request
+            operation = client.update_connection_profile(request=request)
+
+            print("Waiting for operation to complete...")
+
+            response = operation.result()
+
+            # Handle the response
+            print(response)
+
+        Args:
+            request (Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]):
+                The request object. Request message for
+                'UpdateConnectionProfile' request.
+            connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile):
+                Required. The connection profile
+                parameters to update.
+
+                This corresponds to the ``connection_profile`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. Field mask is used to
+                specify the fields to be overwritten by
+                the update in the connection profile
+                resource.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConnectionProfile` + A connection profile definition. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection_profile, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.UpdateConnectionProfileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.UpdateConnectionProfileRequest): + request = clouddms.UpdateConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection_profile is not None: + request.connection_profile = connection_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection_profile.name", request.connection_profile.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.ConnectionProfile, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_connection_profile(self, + request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]): + The request object. Request message for + 'DeleteConnectionProfile' request. + name (str): + Required. Name of the connection + profile resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeleteConnectionProfileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeleteConnectionProfileRequest): + request = clouddms.DeleteConnectionProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_connection_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_private_connection(self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (str): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (str): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreatePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreatePrivateConnectionRequest): + request = clouddms.CreatePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_private_connection(self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]): + The request object. Request message to get a private + connection resource. + name (str): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetPrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetPrivateConnectionRequest): + request = clouddms.GetPrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_private_connections(self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (str): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListPrivateConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListPrivateConnectionsRequest): + request = clouddms.ListPrivateConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_private_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPrivateConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_private_connection(self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]): + The request object. Request message to delete a private + connection. + name (str): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeletePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeletePrivateConnectionRequest): + request = clouddms.DeletePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_conversion_workspace(self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetConversionWorkspaceRequest): + request = clouddms.GetConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_conversion_workspaces(self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListConversionWorkspacesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListConversionWorkspacesRequest): + request = clouddms.ListConversionWorkspacesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_conversion_workspaces] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConversionWorkspacesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_conversion_workspace(self, + request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]): + The request 
object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (str): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreateConversionWorkspaceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): + request = clouddms.CreateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_conversion_workspace(self, + request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, + *, + conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single conversion + workspace. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.UpdateConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): + request = clouddms.UpdateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace.name", request.conversion_workspace.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_conversion_workspace(self, + request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeleteConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): + request = clouddms.DeleteConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def seed_conversion_workspace(self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SeedConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.seed_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_mapping_rules(self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ImportMappingRulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ImportMappingRulesRequest): + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def convert_conversion_workspace(self, + request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ConvertConversionWorkspace' request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ConvertConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.convert_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def commit_conversion_workspace(self, + request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CommitConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def rollback_conversion_workspace(self, + request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.RollbackConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.rollback_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def apply_conversion_workspace(self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ApplyConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.apply_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def describe_database_entities(self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeDatabaseEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.describe_database_entities] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.DescribeDatabaseEntitiesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_background_jobs(self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SearchBackgroundJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SearchBackgroundJobsRequest): + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_background_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def describe_conversion_workspace_revisions(self, + request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeConversionWorkspaceRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DescribeConversionWorkspaceRevisionsRequest): + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.describe_conversion_workspace_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("conversion_workspace", request.conversion_workspace), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_static_ips(self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]): + The request object. Request message for 'FetchStaticIps' + request. + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.FetchStaticIpsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.FetchStaticIpsRequest): + request = clouddms.FetchStaticIpsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_static_ips] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchStaticIpsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataMigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. 
+ + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.get_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataMigrationServiceClient", +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py new file mode 100644 index 0000000..a543082 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py @@ -0,0 +1,746 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources + + +class ListMigrationJobsPager: + """A pager for iterating through ``list_migration_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationJobs`` requests and continue to iterate + through the ``migration_jobs`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.ListMigrationJobsResponse], + request: clouddms.ListMigrationJobsRequest, + response: clouddms.ListMigrationJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListMigrationJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListMigrationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.MigrationJob]: + for page in self.pages: + yield from page.migration_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMigrationJobsAsyncPager: + """A pager for iterating through ``list_migration_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_jobs`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationJobs`` requests and continue to iterate + through the ``migration_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListMigrationJobsResponse]], + request: clouddms.ListMigrationJobsRequest, + response: clouddms.ListMigrationJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListMigrationJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListMigrationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.MigrationJob]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionProfilesPager: + """A pager for iterating through ``list_connection_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connection_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnectionProfiles`` requests and continue to iterate + through the ``connection_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.ListConnectionProfilesResponse], + request: clouddms.ListConnectionProfilesRequest, + response: clouddms.ListConnectionProfilesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConnectionProfilesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConnectionProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.ConnectionProfile]: + for page in self.pages: + yield from page.connection_profiles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionProfilesAsyncPager: + """A pager for iterating through ``list_connection_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connection_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnectionProfiles`` requests and continue to iterate + through the ``connection_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListConnectionProfilesResponse]], + request: clouddms.ListConnectionProfilesRequest, + response: clouddms.ListConnectionProfilesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConnectionProfilesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConnectionProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.ConnectionProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.connection_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``private_connections`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.ListPrivateConnectionsResponse], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.PrivateConnection]: + for page in self.pages: + yield from page.private_connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsAsyncPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListPrivateConnectionsResponse]], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[clouddms_resources.PrivateConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.private_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``conversion_workspaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.ListConversionWorkspacesResponse], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.ConversionWorkspace]: + for page in self.pages: + yield from page.conversion_workspaces + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesAsyncPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``conversion_workspaces`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.ListConversionWorkspacesResponse]], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.ConversionWorkspace]: + async def async_generator(): + async for page in self.pages: + for response in page.conversion_workspaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.DescribeDatabaseEntitiesResponse], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.DatabaseEntity]: + for page in self.pages: + yield from page.database_entities + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesAsyncPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.DescribeDatabaseEntitiesResponse]], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.DatabaseEntity]: + async def async_generator(): + async for page in self.pages: + for response in page.database_entities: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchStaticIpsPager: + """A pager for iterating through ``fetch_static_ips`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., clouddms.FetchStaticIpsResponse], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.static_ips + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchStaticIpsAsyncPager: + """A pager for iterating through ``fetch_static_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[clouddms.FetchStaticIpsResponse]], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. 
+ response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.static_ips: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py new file mode 100644 index 0000000..c5bfc5f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataMigrationServiceTransport +from .grpc import DataMigrationServiceGrpcTransport +from .grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataMigrationServiceTransport]] +_transport_registry['grpc'] = DataMigrationServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataMigrationServiceGrpcAsyncIOTransport + +__all__ = ( + 'DataMigrationServiceTransport', + 'DataMigrationServiceGrpcTransport', + 'DataMigrationServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py new file mode 100644 index 0000000..ddfe3fb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py @@ -0,0 +1,733 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.clouddms_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataMigrationServiceTransport(abc.ABC): + """Abstract transport class for DataMigrationService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datamigration.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_migration_jobs: gapic_v1.method.wrap_method( + self.list_migration_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_job: gapic_v1.method.wrap_method( + self.get_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_migration_job: gapic_v1.method.wrap_method( + self.create_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.update_migration_job: gapic_v1.method.wrap_method( + self.update_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_migration_job: gapic_v1.method.wrap_method( + self.delete_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.start_migration_job: gapic_v1.method.wrap_method( + self.start_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.stop_migration_job: gapic_v1.method.wrap_method( + self.stop_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.resume_migration_job: gapic_v1.method.wrap_method( + self.resume_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.promote_migration_job: gapic_v1.method.wrap_method( + self.promote_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.verify_migration_job: gapic_v1.method.wrap_method( + self.verify_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.restart_migration_job: 
gapic_v1.method.wrap_method( + self.restart_migration_job, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_ssh_script: gapic_v1.method.wrap_method( + self.generate_ssh_script, + default_timeout=60.0, + client_info=client_info, + ), + self.list_connection_profiles: gapic_v1.method.wrap_method( + self.list_connection_profiles, + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection_profile: gapic_v1.method.wrap_method( + self.get_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_connection_profile: gapic_v1.method.wrap_method( + self.create_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection_profile: gapic_v1.method.wrap_method( + self.update_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection_profile: gapic_v1.method.wrap_method( + self.delete_connection_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.create_private_connection: gapic_v1.method.wrap_method( + self.create_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_private_connection: gapic_v1.method.wrap_method( + self.get_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.list_private_connections: gapic_v1.method.wrap_method( + self.list_private_connections, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_private_connection: gapic_v1.method.wrap_method( + self.delete_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_conversion_workspace: gapic_v1.method.wrap_method( + self.get_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_conversion_workspaces: gapic_v1.method.wrap_method( + self.list_conversion_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.create_conversion_workspace: gapic_v1.method.wrap_method( + 
self.create_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.update_conversion_workspace: gapic_v1.method.wrap_method( + self.update_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_conversion_workspace: gapic_v1.method.wrap_method( + self.delete_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.seed_conversion_workspace: gapic_v1.method.wrap_method( + self.seed_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.import_mapping_rules: gapic_v1.method.wrap_method( + self.import_mapping_rules, + default_timeout=60.0, + client_info=client_info, + ), + self.convert_conversion_workspace: gapic_v1.method.wrap_method( + self.convert_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.commit_conversion_workspace: gapic_v1.method.wrap_method( + self.commit_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_conversion_workspace: gapic_v1.method.wrap_method( + self.rollback_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.apply_conversion_workspace: gapic_v1.method.wrap_method( + self.apply_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_database_entities: gapic_v1.method.wrap_method( + self.describe_database_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.search_background_jobs: gapic_v1.method.wrap_method( + self.search_background_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_conversion_workspace_revisions: gapic_v1.method.wrap_method( + self.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_static_ips: gapic_v1.method.wrap_method( + self.fetch_static_ips, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources 
associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + Union[ + clouddms.ListMigrationJobsResponse, + Awaitable[clouddms.ListMigrationJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + Union[ + clouddms_resources.MigrationJob, + Awaitable[clouddms_resources.MigrationJob] + ]]: + raise NotImplementedError() + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + Union[ + operations_pb2.Operation, + 
Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + Union[ + clouddms.SshScript, + Awaitable[clouddms.SshScript] + ]]: + raise NotImplementedError() + + @property + def list_connection_profiles(self) -> Callable[ + [clouddms.ListConnectionProfilesRequest], + Union[ + clouddms.ListConnectionProfilesResponse, + Awaitable[clouddms.ListConnectionProfilesResponse] + ]]: + raise NotImplementedError() + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + Union[ + clouddms_resources.ConnectionProfile, + Awaitable[clouddms_resources.ConnectionProfile] + ]]: + raise NotImplementedError() + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + Union[ + operations_pb2.Operation, + 
Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Union[ + clouddms_resources.PrivateConnection, + Awaitable[clouddms_resources.PrivateConnection] + ]]: + raise NotImplementedError() + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Union[ + clouddms.ListPrivateConnectionsResponse, + Awaitable[clouddms.ListPrivateConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Union[ + conversionworkspace_resources.ConversionWorkspace, + Awaitable[conversionworkspace_resources.ConversionWorkspace] + ]]: + raise NotImplementedError() + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Union[ + clouddms.ListConversionWorkspacesResponse, + Awaitable[clouddms.ListConversionWorkspacesResponse] + ]]: + raise NotImplementedError() + + @property + def create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def 
delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Union[ + clouddms.DescribeDatabaseEntitiesResponse, + Awaitable[clouddms.DescribeDatabaseEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Union[ + clouddms.SearchBackgroundJobsResponse, + 
Awaitable[clouddms.SearchBackgroundJobsResponse] + ]]: + raise NotImplementedError() + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Union[ + clouddms.DescribeConversionWorkspaceRevisionsResponse, + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse] + ]]: + raise NotImplementedError() + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Union[ + clouddms.FetchStaticIpsResponse, + Awaitable[clouddms.FetchStaticIpsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise 
NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataMigrationServiceTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py new file mode 100644 index 0000000..bfc3d2e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py @@ -0,0 +1,1430 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO + + +class DataMigrationServiceGrpcTransport(DataMigrationServiceTransport): + """gRPC backend transport for DataMigrationService. + + Database Migration service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + clouddms.ListMigrationJobsResponse]: + r"""Return a callable for the list migration jobs method over gRPC. + + Lists migration jobs in a given project and location. + + Returns: + Callable[[~.ListMigrationJobsRequest], + ~.ListMigrationJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_migration_jobs' not in self._stubs: + self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', + request_serializer=clouddms.ListMigrationJobsRequest.serialize, + response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, + ) + return self._stubs['list_migration_jobs'] + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + clouddms_resources.MigrationJob]: + r"""Return a callable for the get migration job method over gRPC. + + Gets details of a single migration job. + + Returns: + Callable[[~.GetMigrationJobRequest], + ~.MigrationJob]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_migration_job' not in self._stubs: + self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', + request_serializer=clouddms.GetMigrationJobRequest.serialize, + response_deserializer=clouddms_resources.MigrationJob.deserialize, + ) + return self._stubs['get_migration_job'] + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the create migration job method over gRPC. + + Creates a new migration job in a given project and + location. + + Returns: + Callable[[~.CreateMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_migration_job' not in self._stubs: + self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', + request_serializer=clouddms.CreateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_migration_job'] + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the update migration job method over gRPC. + + Updates the parameters of a single migration job. + + Returns: + Callable[[~.UpdateMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_migration_job' not in self._stubs: + self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', + request_serializer=clouddms.UpdateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_migration_job'] + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete migration job method over gRPC. + + Deletes a single migration job. + + Returns: + Callable[[~.DeleteMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_migration_job' not in self._stubs: + self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', + request_serializer=clouddms.DeleteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_migration_job'] + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the start migration job method over gRPC. + + Start an already created migration job. + + Returns: + Callable[[~.StartMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'start_migration_job' not in self._stubs: + self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', + request_serializer=clouddms.StartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['start_migration_job'] + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the stop migration job method over gRPC. + + Stops a running migration job. + + Returns: + Callable[[~.StopMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'stop_migration_job' not in self._stubs: + self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', + request_serializer=clouddms.StopMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['stop_migration_job'] + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the resume migration job method over gRPC. + + Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + Returns: + Callable[[~.ResumeMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_migration_job' not in self._stubs: + self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', + request_serializer=clouddms.ResumeMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['resume_migration_job'] + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the promote migration job method over gRPC. + + Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + Returns: + Callable[[~.PromoteMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'promote_migration_job' not in self._stubs: + self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', + request_serializer=clouddms.PromoteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['promote_migration_job'] + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the verify migration job method over gRPC. + + Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. 
+ + Returns: + Callable[[~.VerifyMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'verify_migration_job' not in self._stubs: + self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', + request_serializer=clouddms.VerifyMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['verify_migration_job'] + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the restart migration job method over gRPC. + + Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + Returns: + Callable[[~.RestartMigrationJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restart_migration_job' not in self._stubs: + self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', + request_serializer=clouddms.RestartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restart_migration_job'] + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + clouddms.SshScript]: + r"""Return a callable for the generate ssh script method over gRPC. 
+
+        Generate an SSH configuration script to configure the
+        reverse SSH connectivity.
+
+        Returns:
+            Callable[[~.GenerateSshScriptRequest],
+                    ~.SshScript]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'generate_ssh_script' not in self._stubs:
+            self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary(
+                '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript',
+                request_serializer=clouddms.GenerateSshScriptRequest.serialize,
+                response_deserializer=clouddms.SshScript.deserialize,
+            )
+        return self._stubs['generate_ssh_script']
+
+    @property
+    def list_connection_profiles(self) -> Callable[
+            [clouddms.ListConnectionProfilesRequest],
+            clouddms.ListConnectionProfilesResponse]:
+        r"""Return a callable for the list connection profiles method over gRPC.
+
+        Retrieves a list of all connection profiles in a
+        given project and location.
+
+        Returns:
+            Callable[[~.ListConnectionProfilesRequest],
+                    ~.ListConnectionProfilesResponse]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'list_connection_profiles' not in self._stubs: + self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', + request_serializer=clouddms.ListConnectionProfilesRequest.serialize, + response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, + ) + return self._stubs['list_connection_profiles'] + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + clouddms_resources.ConnectionProfile]: + r"""Return a callable for the get connection profile method over gRPC. + + Gets details of a single connection profile. + + Returns: + Callable[[~.GetConnectionProfileRequest], + ~.ConnectionProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_connection_profile' not in self._stubs: + self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', + request_serializer=clouddms.GetConnectionProfileRequest.serialize, + response_deserializer=clouddms_resources.ConnectionProfile.deserialize, + ) + return self._stubs['get_connection_profile'] + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the create connection profile method over gRPC. + + Creates a new connection profile in a given project + and location. + + Returns: + Callable[[~.CreateConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection_profile' not in self._stubs: + self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', + request_serializer=clouddms.CreateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection_profile'] + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the update connection profile method over gRPC. + + Update the configuration of a single connection + profile. + + Returns: + Callable[[~.UpdateConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_connection_profile' not in self._stubs: + self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', + request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection_profile'] + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete connection profile method over gRPC. + + Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. 
+ + Returns: + Callable[[~.DeleteConnectionProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection_profile' not in self._stubs: + self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', + request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection_profile'] + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_private_connection' not in self._stubs: + self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_private_connection'] + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + clouddms_resources.PrivateConnection]: + r"""Return a callable for the get private connection method over gRPC. 
+ + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + ~.PrivateConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_private_connection' not in self._stubs: + self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs['get_private_connection'] + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + clouddms.ListPrivateConnectionsResponse]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + ~.ListPrivateConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_private_connections' not in self._stubs: + self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs['list_private_connections'] + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_private_connection' not in self._stubs: + self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_private_connection'] + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + conversionworkspace_resources.ConversionWorkspace]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + ~.ConversionWorkspace]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_conversion_workspace' not in self._stubs: + self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs['get_conversion_workspace'] + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + clouddms.ListConversionWorkspacesResponse]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + ~.ListConversionWorkspacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_conversion_workspaces' not in self._stubs: + self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs['list_conversion_workspaces'] + + @property + def create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. 
+ + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_conversion_workspace' not in self._stubs: + self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_conversion_workspace'] + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_conversion_workspace' not in self._stubs: + self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_conversion_workspace'] + + @property + def delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_conversion_workspace' not in self._stubs: + self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_conversion_workspace'] + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'seed_conversion_workspace' not in self._stubs: + self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['seed_conversion_workspace'] + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + operations_pb2.Operation]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_mapping_rules' not in self._stubs: + self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_mapping_rules'] + + @property + def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'convert_conversion_workspace' not in self._stubs: + self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['convert_conversion_workspace'] + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit_conversion_workspace' not in self._stubs: + self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['commit_conversion_workspace'] + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. 
+ + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback_conversion_workspace' not in self._stubs: + self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['rollback_conversion_workspace'] + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + operations_pb2.Operation]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'apply_conversion_workspace' not in self._stubs: + self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['apply_conversion_workspace'] + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + clouddms.DescribeDatabaseEntitiesResponse]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + ~.DescribeDatabaseEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'describe_database_entities' not in self._stubs: + self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs['describe_database_entities'] + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + clouddms.SearchBackgroundJobsResponse]: + r"""Return a callable for the search background jobs method over gRPC. 
+ + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + ~.SearchBackgroundJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_background_jobs' not in self._stubs: + self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs['search_background_jobs'] + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + clouddms.DescribeConversionWorkspaceRevisionsResponse]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + ~.DescribeConversionWorkspaceRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'describe_conversion_workspace_revisions' not in self._stubs: + self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs['describe_conversion_workspace_revisions'] + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + clouddms.FetchStaticIpsResponse]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + ~.FetchStaticIpsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_static_ips' not in self._stubs: + self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs['fetch_static_ips'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataMigrationServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..0dce4cb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py @@ -0,0 +1,1429 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DataMigrationServiceGrpcTransport + + +class DataMigrationServiceGrpcAsyncIOTransport(DataMigrationServiceTransport): + """gRPC AsyncIO backend transport for DataMigrationService. + + Database Migration service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datamigration.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_migration_jobs(self) -> Callable[ + [clouddms.ListMigrationJobsRequest], + Awaitable[clouddms.ListMigrationJobsResponse]]: + r"""Return a callable for the list migration jobs method over gRPC. + + Lists migration jobs in a given project and location. + + Returns: + Callable[[~.ListMigrationJobsRequest], + Awaitable[~.ListMigrationJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_migration_jobs' not in self._stubs: + self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', + request_serializer=clouddms.ListMigrationJobsRequest.serialize, + response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, + ) + return self._stubs['list_migration_jobs'] + + @property + def get_migration_job(self) -> Callable[ + [clouddms.GetMigrationJobRequest], + Awaitable[clouddms_resources.MigrationJob]]: + r"""Return a callable for the get migration job method over gRPC. + + Gets details of a single migration job. + + Returns: + Callable[[~.GetMigrationJobRequest], + Awaitable[~.MigrationJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_migration_job' not in self._stubs: + self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', + request_serializer=clouddms.GetMigrationJobRequest.serialize, + response_deserializer=clouddms_resources.MigrationJob.deserialize, + ) + return self._stubs['get_migration_job'] + + @property + def create_migration_job(self) -> Callable[ + [clouddms.CreateMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create migration job method over gRPC. + + Creates a new migration job in a given project and + location. + + Returns: + Callable[[~.CreateMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_migration_job' not in self._stubs: + self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', + request_serializer=clouddms.CreateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_migration_job'] + + @property + def update_migration_job(self) -> Callable[ + [clouddms.UpdateMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update migration job method over gRPC. + + Updates the parameters of a single migration job. + + Returns: + Callable[[~.UpdateMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_migration_job' not in self._stubs: + self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', + request_serializer=clouddms.UpdateMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_migration_job'] + + @property + def delete_migration_job(self) -> Callable[ + [clouddms.DeleteMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete migration job method over gRPC. + + Deletes a single migration job. + + Returns: + Callable[[~.DeleteMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_migration_job' not in self._stubs: + self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', + request_serializer=clouddms.DeleteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_migration_job'] + + @property + def start_migration_job(self) -> Callable[ + [clouddms.StartMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the start migration job method over gRPC. + + Start an already created migration job. + + Returns: + Callable[[~.StartMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'start_migration_job' not in self._stubs: + self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', + request_serializer=clouddms.StartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['start_migration_job'] + + @property + def stop_migration_job(self) -> Callable[ + [clouddms.StopMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the stop migration job method over gRPC. + + Stops a running migration job. + + Returns: + Callable[[~.StopMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'stop_migration_job' not in self._stubs: + self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', + request_serializer=clouddms.StopMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['stop_migration_job'] + + @property + def resume_migration_job(self) -> Callable[ + [clouddms.ResumeMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the resume migration job method over gRPC. + + Resume a migration job that is currently stopped and + is resumable (was stopped during CDC phase). + + Returns: + Callable[[~.ResumeMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'resume_migration_job' not in self._stubs: + self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', + request_serializer=clouddms.ResumeMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['resume_migration_job'] + + @property + def promote_migration_job(self) -> Callable[ + [clouddms.PromoteMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the promote migration job method over gRPC. + + Promote a migration job, stopping replication to the + destination and promoting the destination to be a + standalone database. + + Returns: + Callable[[~.PromoteMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'promote_migration_job' not in self._stubs: + self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', + request_serializer=clouddms.PromoteMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['promote_migration_job'] + + @property + def verify_migration_job(self) -> Callable[ + [clouddms.VerifyMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the verify migration job method over gRPC. + + Verify a migration job, making sure the destination + can reach the source and that all configuration and + prerequisites are met. + + Returns: + Callable[[~.VerifyMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'verify_migration_job' not in self._stubs: + self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', + request_serializer=clouddms.VerifyMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['verify_migration_job'] + + @property + def restart_migration_job(self) -> Callable[ + [clouddms.RestartMigrationJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restart migration job method over gRPC. + + Restart a stopped or failed migration job, resetting + the destination instance to its original state and + starting the migration process from scratch. + + Returns: + Callable[[~.RestartMigrationJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restart_migration_job' not in self._stubs: + self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', + request_serializer=clouddms.RestartMigrationJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restart_migration_job'] + + @property + def generate_ssh_script(self) -> Callable[ + [clouddms.GenerateSshScriptRequest], + Awaitable[clouddms.SshScript]]: + r"""Return a callable for the generate ssh script method over gRPC. + + Generate a SSH configuration script to configure the + reverse SSH connectivity. 
+ + Returns: + Callable[[~.GenerateSshScriptRequest], + Awaitable[~.SshScript]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_ssh_script' not in self._stubs: + self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript', + request_serializer=clouddms.GenerateSshScriptRequest.serialize, + response_deserializer=clouddms.SshScript.deserialize, + ) + return self._stubs['generate_ssh_script'] + + @property + def list_connection_profiles(self) -> Callable[ + [clouddms.ListConnectionProfilesRequest], + Awaitable[clouddms.ListConnectionProfilesResponse]]: + r"""Return a callable for the list connection profiles method over gRPC. + + Retrieves a list of all connection profiles in a + given project and location. + + Returns: + Callable[[~.ListConnectionProfilesRequest], + Awaitable[~.ListConnectionProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_connection_profiles' not in self._stubs: + self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', + request_serializer=clouddms.ListConnectionProfilesRequest.serialize, + response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, + ) + return self._stubs['list_connection_profiles'] + + @property + def get_connection_profile(self) -> Callable[ + [clouddms.GetConnectionProfileRequest], + Awaitable[clouddms_resources.ConnectionProfile]]: + r"""Return a callable for the get connection profile method over gRPC. + + Gets details of a single connection profile. + + Returns: + Callable[[~.GetConnectionProfileRequest], + Awaitable[~.ConnectionProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_connection_profile' not in self._stubs: + self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', + request_serializer=clouddms.GetConnectionProfileRequest.serialize, + response_deserializer=clouddms_resources.ConnectionProfile.deserialize, + ) + return self._stubs['get_connection_profile'] + + @property + def create_connection_profile(self) -> Callable[ + [clouddms.CreateConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create connection profile method over gRPC. + + Creates a new connection profile in a given project + and location. + + Returns: + Callable[[~.CreateConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection_profile' not in self._stubs: + self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', + request_serializer=clouddms.CreateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection_profile'] + + @property + def update_connection_profile(self) -> Callable[ + [clouddms.UpdateConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update connection profile method over gRPC. + + Update the configuration of a single connection + profile. + + Returns: + Callable[[~.UpdateConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_connection_profile' not in self._stubs: + self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', + request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection_profile'] + + @property + def delete_connection_profile(self) -> Callable[ + [clouddms.DeleteConnectionProfileRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete connection profile method over gRPC. + + Deletes a single Database Migration Service + connection profile. A connection profile can only be + deleted if it is not in use by any active migration + jobs. 
+ + Returns: + Callable[[~.DeleteConnectionProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection_profile' not in self._stubs: + self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', + request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection_profile'] + + @property + def create_private_connection(self) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_private_connection' not in self._stubs: + self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_private_connection'] + + @property + def get_private_connection(self) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Awaitable[clouddms_resources.PrivateConnection]]: + r"""Return a callable for the get private connection method over gRPC. + + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + Awaitable[~.PrivateConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_private_connection' not in self._stubs: + self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs['get_private_connection'] + + @property + def list_private_connections(self) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Awaitable[clouddms.ListPrivateConnectionsResponse]]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + Awaitable[~.ListPrivateConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_private_connections' not in self._stubs: + self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs['list_private_connections'] + + @property + def delete_private_connection(self) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_private_connection' not in self._stubs: + self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_private_connection'] + + @property + def get_conversion_workspace(self) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Awaitable[conversionworkspace_resources.ConversionWorkspace]]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. 
+ + Returns: + Callable[[~.GetConversionWorkspaceRequest], + Awaitable[~.ConversionWorkspace]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_conversion_workspace' not in self._stubs: + self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs['get_conversion_workspace'] + + @property + def list_conversion_workspaces(self) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Awaitable[clouddms.ListConversionWorkspacesResponse]]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + Awaitable[~.ListConversionWorkspacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_conversion_workspaces' not in self._stubs: + self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs['list_conversion_workspaces'] + + @property + def create_conversion_workspace(self) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_conversion_workspace' not in self._stubs: + self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_conversion_workspace'] + + @property + def update_conversion_workspace(self) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_conversion_workspace' not in self._stubs: + self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_conversion_workspace'] + + @property + def delete_conversion_workspace(self) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_conversion_workspace' not in self._stubs: + self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_conversion_workspace'] + + @property + def seed_conversion_workspace(self) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. 
+ + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'seed_conversion_workspace' not in self._stubs: + self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['seed_conversion_workspace'] + + @property + def import_mapping_rules(self) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'import_mapping_rules' not in self._stubs: + self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_mapping_rules'] + + @property + def convert_conversion_workspace(self) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'convert_conversion_workspace' not in self._stubs: + self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['convert_conversion_workspace'] + + @property + def commit_conversion_workspace(self) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit_conversion_workspace' not in self._stubs: + self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['commit_conversion_workspace'] + + @property + def rollback_conversion_workspace(self) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback_conversion_workspace' not in self._stubs: + self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['rollback_conversion_workspace'] + + @property + def apply_conversion_workspace(self) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. 
+ + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'apply_conversion_workspace' not in self._stubs: + self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['apply_conversion_workspace'] + + @property + def describe_database_entities(self) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Awaitable[clouddms.DescribeDatabaseEntitiesResponse]]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + Awaitable[~.DescribeDatabaseEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'describe_database_entities' not in self._stubs: + self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs['describe_database_entities'] + + @property + def search_background_jobs(self) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Awaitable[clouddms.SearchBackgroundJobsResponse]]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + Awaitable[~.SearchBackgroundJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_background_jobs' not in self._stubs: + self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs['search_background_jobs'] + + @property + def describe_conversion_workspace_revisions(self) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse]]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. 
+ + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[~.DescribeConversionWorkspaceRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'describe_conversion_workspace_revisions' not in self._stubs: + self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs['describe_conversion_workspace_revisions'] + + @property + def fetch_static_ips(self) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Awaitable[clouddms.FetchStaticIpsResponse]]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + Awaitable[~.FetchStaticIpsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'fetch_static_ips' not in self._stubs: + self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( + '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs['fetch_static_ips'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        function. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ( + 'DataMigrationServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py new file mode 100644 index 0000000..62abe85 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, + CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, + CreateMigrationJobRequest, + CreatePrivateConnectionRequest, + DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, + DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, + GenerateSshScriptRequest, + GetConnectionProfileRequest, + GetConversionWorkspaceRequest, + GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, + ListConnectionProfilesRequest, + ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, + ListMigrationJobsRequest, + ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, + OperationMetadata, + PromoteMigrationJobRequest, + RestartMigrationJobRequest, + ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, + SshScript, + StartMigrationJobRequest, + StopMigrationJobRequest, + UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, + UpdateMigrationJobRequest, + VerifyMigrationJobRequest, + VmCreationConfig, + VmSelectionConfig, +) +from .clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, + CloudSqlConnectionProfile, + CloudSqlSettings, + ConnectionProfile, + ConversionWorkspaceInfo, + DatabaseType, + ForwardSshTunnelConnectivity, + MigrationJob, + MigrationJobVerificationError, + MySqlConnectionProfile, + OracleConnectionProfile, + PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, + 
ReverseSshConnectivity, + SqlAclEntry, + SqlIpConfig, + SslConfig, + StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, + VpcPeeringConnectivity, + DatabaseEngine, + DatabaseProvider, + NetworkArchitecture, +) +from .conversionworkspace_resources import ( + BackgroundJobLogEntry, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, + BackgroundJobType, + DatabaseEntityType, + ImportRulesFileFormat, +) + +__all__ = ( + 'ApplyConversionWorkspaceRequest', + 'CommitConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'CreateConnectionProfileRequest', + 'CreateConversionWorkspaceRequest', + 'CreateMigrationJobRequest', + 'CreatePrivateConnectionRequest', + 'DeleteConnectionProfileRequest', + 'DeleteConversionWorkspaceRequest', + 'DeleteMigrationJobRequest', + 'DeletePrivateConnectionRequest', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + 'GenerateSshScriptRequest', + 'GetConnectionProfileRequest', + 'GetConversionWorkspaceRequest', + 'GetMigrationJobRequest', + 'GetPrivateConnectionRequest', + 'ImportMappingRulesRequest', + 'ListConnectionProfilesRequest', + 'ListConnectionProfilesResponse', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'OperationMetadata', + 'PromoteMigrationJobRequest', + 'RestartMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'RollbackConversionWorkspaceRequest', + 'SearchBackgroundJobsRequest', + 
'SearchBackgroundJobsResponse', + 'SeedConversionWorkspaceRequest', + 'SshScript', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'UpdateConnectionProfileRequest', + 'UpdateConversionWorkspaceRequest', + 'UpdateMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'AlloyDbConnectionProfile', + 'AlloyDbSettings', + 'CloudSqlConnectionProfile', + 'CloudSqlSettings', + 'ConnectionProfile', + 'ConversionWorkspaceInfo', + 'DatabaseType', + 'ForwardSshTunnelConnectivity', + 'MigrationJob', + 'MigrationJobVerificationError', + 'MySqlConnectionProfile', + 'OracleConnectionProfile', + 'PostgreSqlConnectionProfile', + 'PrivateConnection', + 'PrivateConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'SqlAclEntry', + 'SqlIpConfig', + 'SslConfig', + 'StaticIpConnectivity', + 'StaticServiceIpConnectivity', + 'VpcPeeringConfig', + 'VpcPeeringConnectivity', + 'DatabaseEngine', + 'DatabaseProvider', + 'NetworkArchitecture', + 'BackgroundJobLogEntry', + 'ColumnEntity', + 'ConstraintEntity', + 'ConversionWorkspace', + 'DatabaseEngineInfo', + 'DatabaseEntity', + 'EntityMapping', + 'EntityMappingLogEntry', + 'FunctionEntity', + 'IndexEntity', + 'PackageEntity', + 'SchemaEntity', + 'SequenceEntity', + 'StoredProcedureEntity', + 'SynonymEntity', + 'TableEntity', + 'TriggerEntity', + 'ViewEntity', + 'BackgroundJobType', + 'DatabaseEntityType', + 'ImportRulesFileFormat', +) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py new file mode 100644 index 0000000..0c296cb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py @@ -0,0 +1,1718 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'ListMigrationJobsRequest', + 'ListMigrationJobsResponse', + 'GetMigrationJobRequest', + 'CreateMigrationJobRequest', + 'UpdateMigrationJobRequest', + 'DeleteMigrationJobRequest', + 'StartMigrationJobRequest', + 'StopMigrationJobRequest', + 'ResumeMigrationJobRequest', + 'PromoteMigrationJobRequest', + 'VerifyMigrationJobRequest', + 'RestartMigrationJobRequest', + 'GenerateSshScriptRequest', + 'VmCreationConfig', + 'VmSelectionConfig', + 'SshScript', + 'ListConnectionProfilesRequest', + 'ListConnectionProfilesResponse', + 'GetConnectionProfileRequest', + 'CreateConnectionProfileRequest', + 'UpdateConnectionProfileRequest', + 'DeleteConnectionProfileRequest', + 'CreatePrivateConnectionRequest', + 'ListPrivateConnectionsRequest', + 'ListPrivateConnectionsResponse', + 'DeletePrivateConnectionRequest', + 'GetPrivateConnectionRequest', + 'OperationMetadata', + 'ListConversionWorkspacesRequest', + 'ListConversionWorkspacesResponse', + 'GetConversionWorkspaceRequest', + 'CreateConversionWorkspaceRequest', + 'UpdateConversionWorkspaceRequest', + 'DeleteConversionWorkspaceRequest', + 
'CommitConversionWorkspaceRequest', + 'RollbackConversionWorkspaceRequest', + 'ApplyConversionWorkspaceRequest', + 'SeedConversionWorkspaceRequest', + 'ConvertConversionWorkspaceRequest', + 'ImportMappingRulesRequest', + 'DescribeDatabaseEntitiesRequest', + 'DescribeDatabaseEntitiesResponse', + 'SearchBackgroundJobsRequest', + 'SearchBackgroundJobsResponse', + 'DescribeConversionWorkspaceRevisionsRequest', + 'DescribeConversionWorkspaceRevisionsResponse', + 'FetchStaticIpsRequest', + 'FetchStaticIpsResponse', + }, +) + + +class ListMigrationJobsRequest(proto.Message): + r"""Retrieves a list of all migration jobs in a given project and + location. + + Attributes: + parent (str): + Required. The parent which owns this + collection of migrationJobs. + page_size (int): + The maximum number of migration jobs to + return. The service may return fewer than this + value. If unspecified, at most 50 migration jobs + will be returned. The maximum value is 1000; + values above 1000 are coerced to 1000. + page_token (str): + The nextPageToken value received in the + previous call to migrationJobs.list, used in the + subsequent request to retrieve the next page of + results. On first call this should be left + blank. When paginating, all other parameters + provided to migrationJobs.list must match the + call that provided the page token. + filter (str): + A filter expression that filters migration jobs listed in + the response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list migration jobs created this year by + specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z.** You can also filter nested + fields. For example, you could specify + **reverseSshConnectivity.vmIp = "1.2.3.4"** to select all + migration jobs connecting through the specific SSH tunnel + bastion. 
+ order_by (str): + Sort the results based on the migration job + name. Valid values are: "name", "name asc", and + "name desc". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMigrationJobsResponse(proto.Message): + r"""Response message for 'ListMigrationJobs' request. + + Attributes: + migration_jobs (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob]): + The list of migration jobs objects. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + migration_jobs: MutableSequence[clouddms_resources.MigrationJob] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.MigrationJob, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetMigrationJobRequest(proto.Message): + r"""Request message for 'GetMigrationJob' request. + + Attributes: + name (str): + Required. Name of the migration job resource + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMigrationJobRequest(proto.Message): + r"""Request message to create a new Database Migration Service + migration job in the specified project and region. + + Attributes: + parent (str): + Required. The parent which owns this + collection of migration jobs. + migration_job_id (str): + Required. The ID of the instance to create. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. 
Represents a `migration + job `__ + object. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + migration_job_id: str = proto.Field( + proto.STRING, + number=2, + ) + migration_job: clouddms_resources.MigrationJob = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.MigrationJob, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMigrationJobRequest(proto.Message): + r"""Request message for 'UpdateMigrationJob' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + migration_job (google.cloud.clouddms_v1.types.MigrationJob): + Required. The migration job parameters to + update. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + migration_job: clouddms_resources.MigrationJob = proto.Field( + proto.MESSAGE, + number=2, + message=clouddms_resources.MigrationJob, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteMigrationJobRequest(proto.Message): + r"""Request message for 'DeleteMigrationJob' request. 
+ + Attributes: + name (str): + Required. Name of the migration job resource + to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + force (bool): + The destination CloudSQL connection profile + is always deleted with the migration job. In + case of force delete, the destination CloudSQL + replica database is also deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class StartMigrationJobRequest(proto.Message): + r"""Request message for 'StartMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to start. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StopMigrationJobRequest(proto.Message): + r"""Request message for 'StopMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to stop. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeMigrationJobRequest(proto.Message): + r"""Request message for 'ResumeMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to resume. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PromoteMigrationJobRequest(proto.Message): + r"""Request message for 'PromoteMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to + promote. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VerifyMigrationJobRequest(proto.Message): + r"""Request message for 'VerifyMigrationJob' request. 
+ + Attributes: + name (str): + Name of the migration job resource to verify. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RestartMigrationJobRequest(proto.Message): + r"""Request message for 'RestartMigrationJob' request. + + Attributes: + name (str): + Name of the migration job resource to + restart. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GenerateSshScriptRequest(proto.Message): + r"""Request message for 'GenerateSshScript' request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + migration_job (str): + Name of the migration job resource to + generate the SSH script. + vm (str): + Required. Bastion VM Instance name to use or + to create. + vm_creation_config (google.cloud.clouddms_v1.types.VmCreationConfig): + The VM creation configuration + + This field is a member of `oneof`_ ``vm_config``. + vm_selection_config (google.cloud.clouddms_v1.types.VmSelectionConfig): + The VM selection configuration + + This field is a member of `oneof`_ ``vm_config``. + vm_port (int): + The port that will be open on the bastion + host. 
+ """ + + migration_job: str = proto.Field( + proto.STRING, + number=1, + ) + vm: str = proto.Field( + proto.STRING, + number=2, + ) + vm_creation_config: 'VmCreationConfig' = proto.Field( + proto.MESSAGE, + number=100, + oneof='vm_config', + message='VmCreationConfig', + ) + vm_selection_config: 'VmSelectionConfig' = proto.Field( + proto.MESSAGE, + number=101, + oneof='vm_config', + message='VmSelectionConfig', + ) + vm_port: int = proto.Field( + proto.INT32, + number=3, + ) + + +class VmCreationConfig(proto.Message): + r"""VM creation configuration message + + Attributes: + vm_machine_type (str): + Required. VM instance machine type to create. + vm_zone (str): + The Google Cloud Platform zone to create the + VM in. + subnet (str): + The subnet name the vm needs to be created + in. + """ + + vm_machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + vm_zone: str = proto.Field( + proto.STRING, + number=2, + ) + subnet: str = proto.Field( + proto.STRING, + number=3, + ) + + +class VmSelectionConfig(proto.Message): + r"""VM selection configuration message + + Attributes: + vm_zone (str): + Required. The Google Cloud Platform zone the + VM is located. + """ + + vm_zone: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SshScript(proto.Message): + r"""Response message for 'GenerateSshScript' request. + + Attributes: + script (str): + The ssh configuration script. + """ + + script: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionProfilesRequest(proto.Message): + r"""Request message for 'ListConnectionProfiles' request. + + Attributes: + parent (str): + Required. The parent which owns this + collection of connection profiles. + page_size (int): + The maximum number of connection profiles to + return. The service may return fewer than this + value. If unspecified, at most 50 connection + profiles will be returned. The maximum value is + 1000; values above 1000 are coerced to 1000. 
+ page_token (str): + A page token, received from a previous + ``ListConnectionProfiles`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListConnectionProfiles`` must match the call that provided + the page token. + filter (str): + A filter expression that filters connection profiles listed + in the response. The expression must specify the field name, + a comparison operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list connection profiles created this year + by specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z**. You can also filter nested + fields. For example, you could specify **mySql.username = + %lt;my_username%gt;** to list all connection profiles + configured to connect with a specific username. + order_by (str): + A comma-separated list of fields to order + results according to. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListConnectionProfilesResponse(proto.Message): + r"""Response message for 'ListConnectionProfiles' request. + + Attributes: + connection_profiles (MutableSequence[google.cloud.clouddms_v1.types.ConnectionProfile]): + The response list of connection profiles. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + connection_profiles: MutableSequence[clouddms_resources.ConnectionProfile] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.ConnectionProfile, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConnectionProfileRequest(proto.Message): + r"""Request message for 'GetConnectionProfile' request. + + Attributes: + name (str): + Required. Name of the connection profile + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConnectionProfileRequest(proto.Message): + r"""Request message for 'CreateConnectionProfile' request. + + Attributes: + parent (str): + Required. The parent which owns this + collection of connection profiles. + connection_profile_id (str): + Required. The connection profile identifier. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The create request body including + the connection profile data + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't create any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Create the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection_profile_id: str = proto.Field( + proto.STRING, + number=2, + ) + connection_profile: clouddms_resources.ConnectionProfile = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.ConnectionProfile, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class UpdateConnectionProfileRequest(proto.Message): + r"""Request message for 'UpdateConnectionProfile' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): + Required. The connection profile parameters + to update. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't update any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Update the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + connection_profile: clouddms_resources.ConnectionProfile = proto.Field( + proto.MESSAGE, + number=2, + message=clouddms_resources.ConnectionProfile, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteConnectionProfileRequest(proto.Message): + r"""Request message for 'DeleteConnectionProfile' request. + + Attributes: + name (str): + Required. Name of the connection profile + resource to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + force (bool): + In case of force delete, the CloudSQL replica + database is also deleted (only for CloudSQL + connection profile). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class CreatePrivateConnectionRequest(proto.Message): + r"""Request message to create a new private connection in the + specified project and region. + + Attributes: + parent (str): + Required. The parent that owns the collection + of PrivateConnections. + private_connection_id (str): + Required. The private connection identifier. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection resource to + create. + request_id (str): + Optional. A unique ID used to identify the request. 
If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + skip_validation (bool): + Optional. If set to true, will skip + validations. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + private_connection_id: str = proto.Field( + proto.STRING, + number=2, + ) + private_connection: clouddms_resources.PrivateConnection = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.PrivateConnection, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListPrivateConnectionsRequest(proto.Message): + r"""Request message to retrieve a list of private connections in + a given project and location. + + Attributes: + parent (str): + Required. The parent that owns the collection + of private connections. + page_size (int): + Maximum number of private connections to + return. If unspecified, at most 50 private + connections that are returned. The maximum value + is 1000; values above 1000 are coerced to 1000. + page_token (str): + Page token received from a previous + ``ListPrivateConnections`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListPrivateConnections`` must match the call that provided + the page token. + filter (str): + A filter expression that filters private connections listed + in the response. The expression must specify the field name, + a comparison operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. 
For example, list private connections created this year
+            by specifying **createTime >
+            2021-01-01T00:00:00.000000000Z**.
+        order_by (str):
+            Order by fields for the result.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListPrivateConnectionsResponse(proto.Message):
+    r"""Response message for 'ListPrivateConnections' request.
+
+    Attributes:
+        private_connections (MutableSequence[google.cloud.clouddms_v1.types.PrivateConnection]):
+            List of private connections.
+        next_page_token (str):
+            A token which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    private_connections: MutableSequence[clouddms_resources.PrivateConnection] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=clouddms_resources.PrivateConnection,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeletePrivateConnectionRequest(proto.Message):
+    r"""Request message to delete a private connection.
+
+    Attributes:
+        name (str):
+            Required. The name of the private connection
+            to delete.
+        request_id (str):
+            Optional. A unique ID used to identify the request. If the
+            server receives two requests with the same ID, then the
+            second request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPrivateConnectionRequest(proto.Message): + r"""Request message to get a private connection resource. + + Attributes: + name (str): + Required. The name of the private connection + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListConversionWorkspacesRequest(proto.Message): + r"""Retrieve a list of all conversion workspaces in a given + project and location. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + page_size (int): + The maximum number of conversion workspaces + to return. The service may return fewer than + this value. If unspecified, at most 50 sets are + returned. + page_token (str): + The nextPageToken value received in the + previous call to conversionWorkspaces.list, used + in the subsequent request to retrieve the next + page of results. On first call this should be + left blank. When paginating, all other + parameters provided to conversionWorkspaces.list + must match the call that provided the page + token. + filter (str): + A filter expression that filters conversion workspaces + listed in the response. The expression must specify the + field name, a comparison operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The comparison operator must be either + =, !=, >, or <. For example, list conversion workspaces + created this year by specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z.** You can also filter nested + fields. 
For example, you could specify **source.version = + "12.c.1"** to select all conversion workspaces with source + database version equal to 12.c.1. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListConversionWorkspacesResponse(proto.Message): + r"""Response message for 'ListConversionWorkspaces' request. + + Attributes: + conversion_workspaces (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace objects. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + conversion_workspaces: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConversionWorkspaceRequest(proto.Message): + r"""Request message for 'GetConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConversionWorkspaceRequest(proto.Message): + r"""Request message to create a new Conversion Workspace + in the specified project and region. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + conversion_workspace_id (str): + Required. The ID of the conversion workspace + to create. 
+ conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion workspace + object. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + conversion_workspace_id: str = proto.Field( + proto.STRING, + number=2, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( + proto.MESSAGE, + number=3, + message=conversionworkspace_resources.ConversionWorkspace, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateConversionWorkspaceRequest(proto.Message): + r"""Request message for 'UpdateConversionWorkspace' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace parameters + to update. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( + proto.MESSAGE, + number=2, + message=conversionworkspace_resources.ConversionWorkspace, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteConversionWorkspaceRequest(proto.Message): + r"""Request message for 'DeleteConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CommitConversionWorkspaceRequest(proto.Message): + r"""Request message for 'CommitConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to commit. + commit_name (str): + Optional. Optional name of the commit. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RollbackConversionWorkspaceRequest(proto.Message): + r"""Request message for 'RollbackConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to roll back to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ApplyConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ApplyConversionWorkspace' request. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the conversion workspace resource for + which to apply the draft tree. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + filter (str): + Filter which entities to apply. Leaving this + field empty will apply all of the entities. + Supports Google AIP 160 based filtering. + connection_profile (str): + Fully qualified (Uri) name of the destination + connection profile. + + This field is a member of `oneof`_ ``destination``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof='destination', + ) + + +class SeedConversionWorkspaceRequest(proto.Message): + r"""Request message for 'SeedConversionWorkspace' request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the conversion workspace resource to seed with new + database structure, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Should the conversion workspace be committed + automatically after the seed operation. + source_connection_profile (str): + Fully qualified (Uri) name of the source + connection profile. + + This field is a member of `oneof`_ ``seed_from``. + destination_connection_profile (str): + Fully qualified (Uri) name of the destination + connection profile. + + This field is a member of `oneof`_ ``seed_from``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=2, + ) + source_connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof='seed_from', + ) + destination_connection_profile: str = proto.Field( + proto.STRING, + number=101, + oneof='seed_from', + ) + + +class ConvertConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ConvertConversionWorkspace' request. + + Attributes: + name (str): + Name of the conversion workspace resource to convert in the + form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Specifies whether the conversion workspace is + to be committed automatically after the + conversion. + filter (str): + Filter the entities to convert. Leaving this + field empty will convert all of the entities. + Supports Google AIP-160 style filtering. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ImportMappingRulesRequest(proto.Message): + r"""Request message for 'ImportMappingRules' request. + + Attributes: + parent (str): + Required. Name of the conversion workspace resource to + import the rules to in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + rules_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + The format of the rules content file. + rules_files (MutableSequence[google.cloud.clouddms_v1.types.ImportMappingRulesRequest.RulesFile]): + One or more rules files. + auto_commit (bool): + Should the conversion workspace be committed + automatically after the import operation. + """ + + class RulesFile(proto.Message): + r"""Details of a single rules file. + + Attributes: + rules_source_filename (str): + The filename of the rules that needs to be + converted. 
The filename is used mainly so that + future logs of the import rules job contain it, + and can therefore be searched by it. + rules_content (str): + The text content of the rules that needs to + be converted. + """ + + rules_source_filename: str = proto.Field( + proto.STRING, + number=1, + ) + rules_content: str = proto.Field( + proto.STRING, + number=2, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + rules_format: conversionworkspace_resources.ImportRulesFileFormat = proto.Field( + proto.ENUM, + number=2, + enum=conversionworkspace_resources.ImportRulesFileFormat, + ) + rules_files: MutableSequence[RulesFile] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=RulesFile, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class DescribeDatabaseEntitiesRequest(proto.Message): + r"""Request message for 'DescribeDatabaseEntities' request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + database entities are described. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + page_size (int): + The maximum number of entities to return. The + service may return fewer entities than the value + specifies. + page_token (str): + The nextPageToken value received in the + previous call to + conversionWorkspace.describeDatabaseEntities, + used in the subsequent request to retrieve the + next page of results. On first call this should + be left blank. When paginating, all other + parameters provided to + conversionWorkspace.describeDatabaseEntities + must match the call that provided the page + token. + tree (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest.DBTreeType): + The tree to fetch. + uncommitted (bool): + Whether to retrieve the latest committed version of the + entities or the latest version. This field is ignored if a + specific commit_id is specified. 
+ commit_id (str): + Request a specific commit ID. If not + specified, the entities from the latest commit + are returned. + filter (str): + Filter the returned entities based on AIP-160 + standard. + """ + class DBTreeType(proto.Enum): + r"""The type of a tree to return + + Values: + DB_TREE_TYPE_UNSPECIFIED (0): + Unspecified tree type. + SOURCE_TREE (1): + The source database tree. + DRAFT_TREE (2): + The draft database tree. + DESTINATION_TREE (3): + The destination database tree. + """ + DB_TREE_TYPE_UNSPECIFIED = 0 + SOURCE_TREE = 1 + DRAFT_TREE = 2 + DESTINATION_TREE = 3 + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + tree: DBTreeType = proto.Field( + proto.ENUM, + number=6, + enum=DBTreeType, + ) + uncommitted: bool = proto.Field( + proto.BOOL, + number=11, + ) + commit_id: str = proto.Field( + proto.STRING, + number=12, + ) + filter: str = proto.Field( + proto.STRING, + number=13, + ) + + +class DescribeDatabaseEntitiesResponse(proto.Message): + r"""Response message for 'DescribeDatabaseEntities' request. + + Attributes: + database_entities (MutableSequence[google.cloud.clouddms_v1.types.DatabaseEntity]): + The list of database entities for the + conversion workspace. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + database_entities: MutableSequence[conversionworkspace_resources.DatabaseEntity] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.DatabaseEntity, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchBackgroundJobsRequest(proto.Message): + r"""Request message for 'SearchBackgroundJobs' request. 
+ + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + jobs are listed, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + return_most_recent_per_job_type (bool): + Optional. Whether or not to return just the + most recent job per job type, + max_size (int): + Optional. The maximum number of jobs to + return. The service may return fewer than this + value. If unspecified, at most 100 jobs are + returned. The maximum value is 100; values above + 100 are coerced to 100. + completed_until_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. If provided, only returns jobs that + completed until (not including) the given + timestamp. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + return_most_recent_per_job_type: bool = proto.Field( + proto.BOOL, + number=2, + ) + max_size: int = proto.Field( + proto.INT32, + number=3, + ) + completed_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class SearchBackgroundJobsResponse(proto.Message): + r"""Response message for 'SearchBackgroundJobs' request. + + Attributes: + jobs (MutableSequence[google.cloud.clouddms_v1.types.BackgroundJobLogEntry]): + The list of conversion workspace mapping + rules. + """ + + jobs: MutableSequence[conversionworkspace_resources.BackgroundJobLogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.BackgroundJobLogEntry, + ) + + +class DescribeConversionWorkspaceRevisionsRequest(proto.Message): + r"""Request message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + revisions are listed. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + commit_id (str): + Optional. 
Optional filter to request a + specific commit ID. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DescribeConversionWorkspaceRevisionsResponse(proto.Message): + r"""Response message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + revisions (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace revisions. + """ + + revisions: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + + +class FetchStaticIpsRequest(proto.Message): + r"""Request message for 'FetchStaticIps' request. + + Attributes: + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of IPs to return. + page_token (str): + A page token, received from a previous ``FetchStaticIps`` + call. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchStaticIpsResponse(proto.Message): + r"""Response message for a 'FetchStaticIps' request. + + Attributes: + static_ips (MutableSequence[str]): + List of static IPs. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + static_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py new file mode 100644 index 0000000..f0bdff0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py @@ -0,0 +1,2025 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'NetworkArchitecture', + 'DatabaseEngine', + 'DatabaseProvider', + 'SslConfig', + 'MySqlConnectionProfile', + 'PostgreSqlConnectionProfile', + 'OracleConnectionProfile', + 'CloudSqlConnectionProfile', + 'AlloyDbConnectionProfile', + 'SqlAclEntry', + 'SqlIpConfig', + 'CloudSqlSettings', + 'AlloyDbSettings', + 'StaticIpConnectivity', + 'PrivateServiceConnectConnectivity', + 'ReverseSshConnectivity', + 'VpcPeeringConnectivity', + 'ForwardSshTunnelConnectivity', + 'StaticServiceIpConnectivity', + 'PrivateConnectivity', + 'DatabaseType', + 'MigrationJob', + 'ConversionWorkspaceInfo', + 'ConnectionProfile', + 'MigrationJobVerificationError', + 'PrivateConnection', + 'VpcPeeringConfig', + }, +) + + +class NetworkArchitecture(proto.Enum): + r""" + + Values: + NETWORK_ARCHITECTURE_UNSPECIFIED (0): + No description available. + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER (1): + Instance is in Cloud SQL's old producer + network architecture. + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER (2): + Instance is in Cloud SQL's new producer + network architecture. + """ + NETWORK_ARCHITECTURE_UNSPECIFIED = 0 + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER = 1 + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER = 2 + + +class DatabaseEngine(proto.Enum): + r"""The database engine types. + + Values: + DATABASE_ENGINE_UNSPECIFIED (0): + The source database engine of the migration + job is unknown. + MYSQL (1): + The source engine is MySQL. + POSTGRESQL (2): + The source engine is PostgreSQL. + ORACLE (4): + The source engine is Oracle. 
+ """ + DATABASE_ENGINE_UNSPECIFIED = 0 + MYSQL = 1 + POSTGRESQL = 2 + ORACLE = 4 + + +class DatabaseProvider(proto.Enum): + r"""The database providers. + + Values: + DATABASE_PROVIDER_UNSPECIFIED (0): + The database provider is unknown. + CLOUDSQL (1): + CloudSQL runs the database. + RDS (2): + RDS runs the database. + AURORA (3): + Amazon Aurora. + ALLOYDB (4): + AlloyDB. + """ + DATABASE_PROVIDER_UNSPECIFIED = 0 + CLOUDSQL = 1 + RDS = 2 + AURORA = 3 + ALLOYDB = 4 + + +class SslConfig(proto.Message): + r"""SSL configuration information. + + Attributes: + type_ (google.cloud.clouddms_v1.types.SslConfig.SslType): + Output only. The ssl config type according to 'client_key', + 'client_certificate' and 'ca_certificate'. + client_key (str): + Input only. The unencrypted PKCS#1 or PKCS#8 PEM-encoded + private key associated with the Client Certificate. If this + field is used then the 'client_certificate' field is + mandatory. + client_certificate (str): + Input only. The x509 PEM-encoded certificate that will be + used by the replica to authenticate against the source + database server.If this field is used then the 'client_key' + field is mandatory. + ca_certificate (str): + Required. Input only. The x509 PEM-encoded + certificate of the CA that signed the source + database server's certificate. The replica will + use this certificate to verify it's connecting + to the right host. + """ + class SslType(proto.Enum): + r"""Specifies The kind of ssl configuration used. + + Values: + SSL_TYPE_UNSPECIFIED (0): + Unspecified. + SERVER_ONLY (1): + Only 'ca_certificate' specified. + SERVER_CLIENT (2): + Both server ('ca_certificate'), and client ('client_key', + 'client_certificate') specified. 
+ """ + SSL_TYPE_UNSPECIFIED = 0 + SERVER_ONLY = 1 + SERVER_CLIENT = 2 + + type_: SslType = proto.Field( + proto.ENUM, + number=1, + enum=SslType, + ) + client_key: str = proto.Field( + proto.STRING, + number=2, + ) + client_certificate: str = proto.Field( + proto.STRING, + number=3, + ) + ca_certificate: str = proto.Field( + proto.STRING, + number=4, + ) + + +class MySqlConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + MySQL databases. + + Attributes: + host (str): + Required. The IP or hostname of the source + MySQL database. + port (int): + Required. The network port of the source + MySQL database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates If this connection + profile password is stored. + ssl (google.cloud.clouddms_v1.types.SslConfig): + SSL configuration for the destination to + connect to the source database. + cloud_sql_id (str): + If the source is a Cloud SQL database, use + this field to provide the Cloud SQL instance ID + of the source. 
+ """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + ssl: 'SslConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SslConfig', + ) + cloud_sql_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class PostgreSqlConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + PostgreSQL databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + PostgreSQL database. + port (int): + Required. The network port of the source + PostgreSQL database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates If this connection + profile password is stored. + ssl (google.cloud.clouddms_v1.types.SslConfig): + SSL configuration for the destination to + connect to the source database. + cloud_sql_id (str): + If the source is a Cloud SQL database, use + this field to provide the Cloud SQL instance ID + of the source. 
+ network_architecture (google.cloud.clouddms_v1.types.NetworkArchitecture): + Output only. If the source is a Cloud SQL + database, this field indicates the network + architecture it's associated with. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + Static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + private_service_connect_connectivity (google.cloud.clouddms_v1.types.PrivateServiceConnectConnectivity): + Private service connect connectivity. + + This field is a member of `oneof`_ ``connectivity``. + """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + ssl: 'SslConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SslConfig', + ) + cloud_sql_id: str = proto.Field( + proto.STRING, + number=7, + ) + network_architecture: 'NetworkArchitecture' = proto.Field( + proto.ENUM, + number=8, + enum='NetworkArchitecture', + ) + static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='StaticIpConnectivity', + ) + private_service_connect_connectivity: 'PrivateServiceConnectConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='PrivateServiceConnectConnectivity', + ) + + +class OracleConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + Oracle databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + Oracle database. + port (int): + Required. The network port of the source + Oracle database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates whether a new password + is included in the request. + database_service (str): + Required. Database service for the Oracle + connection. + static_service_ip_connectivity (google.cloud.clouddms_v1.types.StaticServiceIpConnectivity): + Static Service IP connectivity. + + This field is a member of `oneof`_ ``connectivity``. + forward_ssh_connectivity (google.cloud.clouddms_v1.types.ForwardSshTunnelConnectivity): + Forward SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + private_connectivity (google.cloud.clouddms_v1.types.PrivateConnectivity): + Private connectivity. + + This field is a member of `oneof`_ ``connectivity``. 
+ """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_service: str = proto.Field( + proto.STRING, + number=6, + ) + static_service_ip_connectivity: 'StaticServiceIpConnectivity' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='StaticServiceIpConnectivity', + ) + forward_ssh_connectivity: 'ForwardSshTunnelConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='ForwardSshTunnelConnectivity', + ) + private_connectivity: 'PrivateConnectivity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connectivity', + message='PrivateConnectivity', + ) + + +class CloudSqlConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and, optionally, + the parameters required to create a Cloud SQL destination + database instance. + + Attributes: + cloud_sql_id (str): + Output only. The Cloud SQL instance ID that + this connection profile is associated with. + settings (google.cloud.clouddms_v1.types.CloudSqlSettings): + Immutable. Metadata used to create the + destination Cloud SQL database. + private_ip (str): + Output only. The Cloud SQL database + instance's private IP. + public_ip (str): + Output only. The Cloud SQL database + instance's public IP. + additional_public_ip (str): + Output only. The Cloud SQL database + instance's additional (outgoing) public IP. Used + when the Cloud SQL database availability type is + REGIONAL (i.e. multiple zones / highly + available). 
+ """ + + cloud_sql_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'CloudSqlSettings' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudSqlSettings', + ) + private_ip: str = proto.Field( + proto.STRING, + number=3, + ) + public_ip: str = proto.Field( + proto.STRING, + number=4, + ) + additional_public_ip: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AlloyDbConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and the parameters + required to create an AlloyDB destination cluster. + + Attributes: + cluster_id (str): + Required. The AlloyDB cluster ID that this + connection profile is associated with. + settings (google.cloud.clouddms_v1.types.AlloyDbSettings): + Immutable. Metadata used to create the + destination AlloyDB cluster. + """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'AlloyDbSettings' = proto.Field( + proto.MESSAGE, + number=2, + message='AlloyDbSettings', + ) + + +class SqlAclEntry(proto.Message): + r"""An entry for an Access Control list. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + The allowlisted value for the access control + list. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this access control entry expires in `RFC + 3339 `__ format, for + example: ``2012-11-15T16:19:00.094Z``. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Input only. The time-to-leave of this access + control entry. + + This field is a member of `oneof`_ ``expiration``. + label (str): + A label to identify this entry. 
+ """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + oneof='expiration', + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + oneof='expiration', + message=duration_pb2.Duration, + ) + label: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SqlIpConfig(proto.Message): + r"""IP Management configuration. + + Attributes: + enable_ipv4 (google.protobuf.wrappers_pb2.BoolValue): + Whether the instance should be assigned an + IPv4 address or not. + private_network (str): + The resource link for the VPC network from which the Cloud + SQL instance is accessible for private IP. For example, + ``projects/myProject/global/networks/default``. This setting + can be updated, but it cannot be removed after it is set. + allocated_ip_range (str): + Optional. The name of the allocated IP + address range for the private IP Cloud SQL + instance. This name refers to an already + allocated IP range address. If set, the instance + IP address will be created in the allocated + range. Note that this IP address range can't be + modified after the instance is created. If you + change the VPC when configuring connectivity + settings for the migration job, this field is + not relevant. + require_ssl (google.protobuf.wrappers_pb2.BoolValue): + Whether SSL connections over IP should be + enforced or not. + authorized_networks (MutableSequence[google.cloud.clouddms_v1.types.SqlAclEntry]): + The list of external networks that are allowed to connect to + the instance using the IP. See + https://en.wikipedia.org/wiki/CIDR_notation#CIDR_notation, + also known as 'slash' notation (e.g. ``192.168.100.0/24``). 
+ """ + + enable_ipv4: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + private_network: str = proto.Field( + proto.STRING, + number=2, + ) + allocated_ip_range: str = proto.Field( + proto.STRING, + number=5, + ) + require_ssl: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.BoolValue, + ) + authorized_networks: MutableSequence['SqlAclEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='SqlAclEntry', + ) + + +class CloudSqlSettings(proto.Message): + r"""Settings for creating a Cloud SQL database instance. + + Attributes: + database_version (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDatabaseVersion): + The database engine type and version. + user_labels (MutableMapping[str, str]): + The resource labels for a Cloud SQL instance to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "18kg", "count": "3" }``. + tier (str): + The tier (or machine type) for this instance, for example: + ``db-n1-standard-1`` (MySQL instances) or + ``db-custom-1-3840`` (PostgreSQL instances). For more + information, see `Cloud SQL Instance + Settings `__. + storage_auto_resize_limit (google.protobuf.wrappers_pb2.Int64Value): + The maximum size to which storage capacity + can be automatically increased. The default + value is 0, which specifies that there is no + limit. + activation_policy (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlActivationPolicy): + The activation policy specifies when the instance is + activated; it is applicable only when the instance state is + 'RUNNABLE'. Valid values: + + 'ALWAYS': The instance is on, and remains so even in the + absence of connection requests. + + ``NEVER``: The instance is off; it is not activated, even if + a connection request arrives. 
+ ip_config (google.cloud.clouddms_v1.types.SqlIpConfig): + The settings for IP Management. This allows + to enable or disable the instance IP and manage + which external networks can connect to the + instance. The IPv4 address cannot be disabled. + auto_storage_increase (google.protobuf.wrappers_pb2.BoolValue): + [default: ON] If you enable this setting, Cloud SQL checks + your available storage every 30 seconds. If the available + storage falls below a threshold size, Cloud SQL + automatically adds additional storage capacity. If the + available storage repeatedly falls below the threshold size, + Cloud SQL continues to add storage until it reaches the + maximum of 30 TB. + database_flags (MutableMapping[str, str]): + The database flags passed to the Cloud SQL + instance at startup. An object containing a list + of "key": value pairs. Example: { "name": + "wrench", "mass": "1.3kg", "count": "3" }. + data_disk_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDataDiskType): + The type of storage: ``PD_SSD`` (default) or ``PD_HDD``. + data_disk_size_gb (google.protobuf.wrappers_pb2.Int64Value): + The storage capacity available to the + database, in GB. The minimum (and default) size + is 10GB. + zone (str): + The Google Cloud Platform zone where your + Cloud SQL database instance is located. + secondary_zone (str): + Optional. The Google Cloud Platform zone + where the failover Cloud SQL database instance + is located. Used when the Cloud SQL database + availability type is REGIONAL (i.e. multiple + zones / highly available). + source_id (str): + The Database Migration Service source connection profile ID, + in the format: + ``projects/my_project_name/locations/us-central1/connectionProfiles/connection_profile_ID`` + root_password (str): + Input only. Initial root password. + root_password_set (bool): + Output only. Indicates If this connection + profile root password is stored. + collation (str): + The Cloud SQL default instance level + collation. 
+ cmek_key_name (str): + The KMS key name used for the csql instance. + availability_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlAvailabilityType): + Optional. Availability type. Potential values: + + - ``ZONAL``: The instance serves data from only one zone. + Outages in that zone affect data availability. + - ``REGIONAL``: The instance can serve data from more than + one zone in a region (it is highly available). + """ + class SqlActivationPolicy(proto.Enum): + r"""Specifies when the instance should be activated. + + Values: + SQL_ACTIVATION_POLICY_UNSPECIFIED (0): + unspecified policy. + ALWAYS (1): + The instance is always up and running. + NEVER (2): + The instance should never spin up. + """ + SQL_ACTIVATION_POLICY_UNSPECIFIED = 0 + ALWAYS = 1 + NEVER = 2 + + class SqlDataDiskType(proto.Enum): + r"""The storage options for Cloud SQL databases. + + Values: + SQL_DATA_DISK_TYPE_UNSPECIFIED (0): + Unspecified. + PD_SSD (1): + SSD disk. + PD_HDD (2): + HDD disk. + """ + SQL_DATA_DISK_TYPE_UNSPECIFIED = 0 + PD_SSD = 1 + PD_HDD = 2 + + class SqlDatabaseVersion(proto.Enum): + r"""The database engine type and version. + + Values: + SQL_DATABASE_VERSION_UNSPECIFIED (0): + Unspecified version. + MYSQL_5_6 (1): + MySQL 5.6. + MYSQL_5_7 (2): + MySQL 5.7. + POSTGRES_9_6 (3): + PostgreSQL 9.6. + POSTGRES_11 (4): + PostgreSQL 11. + POSTGRES_10 (5): + PostgreSQL 10. + MYSQL_8_0 (6): + MySQL 8.0. + POSTGRES_12 (7): + PostgreSQL 12. + POSTGRES_13 (8): + PostgreSQL 13. + POSTGRES_14 (17): + PostgreSQL 14. + """ + SQL_DATABASE_VERSION_UNSPECIFIED = 0 + MYSQL_5_6 = 1 + MYSQL_5_7 = 2 + POSTGRES_9_6 = 3 + POSTGRES_11 = 4 + POSTGRES_10 = 5 + MYSQL_8_0 = 6 + POSTGRES_12 = 7 + POSTGRES_13 = 8 + POSTGRES_14 = 17 + + class SqlAvailabilityType(proto.Enum): + r"""The availability type of the given Cloud SQL instance. + + Values: + SQL_AVAILABILITY_TYPE_UNSPECIFIED (0): + This is an unknown Availability type. + ZONAL (1): + Zonal availablility instance. 
+ REGIONAL (2): + Regional availability instance. + """ + SQL_AVAILABILITY_TYPE_UNSPECIFIED = 0 + ZONAL = 1 + REGIONAL = 2 + + database_version: SqlDatabaseVersion = proto.Field( + proto.ENUM, + number=1, + enum=SqlDatabaseVersion, + ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + tier: str = proto.Field( + proto.STRING, + number=3, + ) + storage_auto_resize_limit: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int64Value, + ) + activation_policy: SqlActivationPolicy = proto.Field( + proto.ENUM, + number=5, + enum=SqlActivationPolicy, + ) + ip_config: 'SqlIpConfig' = proto.Field( + proto.MESSAGE, + number=6, + message='SqlIpConfig', + ) + auto_storage_increase: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=7, + message=wrappers_pb2.BoolValue, + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + data_disk_type: SqlDataDiskType = proto.Field( + proto.ENUM, + number=9, + enum=SqlDataDiskType, + ) + data_disk_size_gb: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=10, + message=wrappers_pb2.Int64Value, + ) + zone: str = proto.Field( + proto.STRING, + number=11, + ) + secondary_zone: str = proto.Field( + proto.STRING, + number=18, + ) + source_id: str = proto.Field( + proto.STRING, + number=12, + ) + root_password: str = proto.Field( + proto.STRING, + number=13, + ) + root_password_set: bool = proto.Field( + proto.BOOL, + number=14, + ) + collation: str = proto.Field( + proto.STRING, + number=15, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=16, + ) + availability_type: SqlAvailabilityType = proto.Field( + proto.ENUM, + number=17, + enum=SqlAvailabilityType, + ) + + +class AlloyDbSettings(proto.Message): + r"""Settings for creating an AlloyDB cluster. 
+ + Attributes: + initial_user (google.cloud.clouddms_v1.types.AlloyDbSettings.UserPassword): + Required. Input only. Initial user to setup + during cluster creation. Required. + vpc_network (str): + Required. The resource link for the VPC network in which + cluster resources are created and from which they are + accessible via Private IP. The network must belong to the + same project as the cluster. It is specified in the form: + "projects/{project_number}/global/networks/{network_id}". + This is required to create a cluster. + labels (MutableMapping[str, str]): + Labels for the AlloyDB cluster created by + DMS. An object containing a list of 'key', + 'value' pairs. + primary_instance_settings (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings): + + encryption_config (google.cloud.clouddms_v1.types.AlloyDbSettings.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the data disks and other + persistent data resources of a cluster with a + customer-managed encryption key (CMEK). When + this field is not specified, the cluster will + then use default encryption scheme to protect + the user data. + """ + + class UserPassword(proto.Message): + r"""The username/password for a database user. Used for + specifying initial users at cluster creation time. + + Attributes: + user (str): + The database username. + password (str): + The initial password for the user. + password_set (bool): + Output only. Indicates if the initial_user.password field + has been set. + """ + + user: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class PrimaryInstanceSettings(proto.Message): + r"""Settings for the cluster's primary instance + + Attributes: + id (str): + Required. The ID of the AlloyDB primary instance. The ID + must satisfy the regex expression "[a-z0-9-]+". 
+ machine_config (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig): + Configuration for the machines that host the + underlying database engine. + database_flags (MutableMapping[str, str]): + Database flags to pass to AlloyDB when DMS is + creating the AlloyDB cluster and instances. See + the AlloyDB documentation for how these can be + used. + labels (MutableMapping[str, str]): + Labels for the AlloyDB primary instance + created by DMS. An object containing a list of + 'key', 'value' pairs. + private_ip (str): + Output only. The private IP address for the + Instance. This is the connection endpoint for an + end-user application. + """ + + class MachineConfig(proto.Message): + r"""MachineConfig describes the configuration of a machine. + + Attributes: + cpu_count (int): + The number of CPU's in the VM instance. + """ + + cpu_count: int = proto.Field( + proto.INT32, + number=1, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + machine_config: 'AlloyDbSettings.PrimaryInstanceSettings.MachineConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='AlloyDbSettings.PrimaryInstanceSettings.MachineConfig', + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + private_ip: str = proto.Field( + proto.STRING, + number=8, + ) + + class EncryptionConfig(proto.Message): + r"""EncryptionConfig describes the encryption config of a cluster + that is encrypted with a CMEK (customer-managed encryption key). + + Attributes: + kms_key_name (str): + The fully-qualified resource name of the KMS key. 
Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + initial_user: UserPassword = proto.Field( + proto.MESSAGE, + number=1, + message=UserPassword, + ) + vpc_network: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + primary_instance_settings: PrimaryInstanceSettings = proto.Field( + proto.MESSAGE, + number=4, + message=PrimaryInstanceSettings, + ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=EncryptionConfig, + ) + + +class StaticIpConnectivity(proto.Message): + r"""The source database will allow incoming connections from the + public IP of the destination database. You can retrieve the + public IP of the Cloud SQL instance from the Cloud SQL console + or using Cloud SQL APIs. No additional configuration is + required. + + """ + + +class PrivateServiceConnectConnectivity(proto.Message): + r"""Private Service Connect connectivity + (https://cloud.google.com/vpc/docs/private-service-connect#service-attachments) + + Attributes: + service_attachment (str): + Required. A service attachment that exposes a database, and + has the following format: + projects/{project}/regions/{region}/serviceAttachments/{service_attachment_name} + """ + + service_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ReverseSshConnectivity(proto.Message): + r"""The details needed to configure a reverse SSH tunnel between + the source and destination databases. 
These details will be used + when calling the generateSshScript method (see + https://cloud.google.com/database-migration/docs/reference/rest/v1/projects.locations.migrationJobs/generateSshScript) + to produce the script that will help set up the reverse SSH + tunnel, and to set up the VPC peering between the Cloud SQL + private network and the VPC. + + Attributes: + vm_ip (str): + Required. The IP of the virtual machine + (Compute Engine) used as the bastion server for + the SSH tunnel. + vm_port (int): + Required. The forwarding port of the virtual + machine (Compute Engine) used as the bastion + server for the SSH tunnel. + vm (str): + The name of the virtual machine (Compute + Engine) used as the bastion server for the SSH + tunnel. + vpc (str): + The name of the VPC to peer with the Cloud + SQL private network. + """ + + vm_ip: str = proto.Field( + proto.STRING, + number=1, + ) + vm_port: int = proto.Field( + proto.INT32, + number=2, + ) + vm: str = proto.Field( + proto.STRING, + number=3, + ) + vpc: str = proto.Field( + proto.STRING, + number=4, + ) + + +class VpcPeeringConnectivity(proto.Message): + r"""The details of the VPC where the source database is located + in Google Cloud. We will use this information to set up the VPC + peering connection between Cloud SQL and this VPC. + + Attributes: + vpc (str): + The name of the VPC network to peer with the + Cloud SQL private network. + """ + + vpc: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ForwardSshTunnelConnectivity(proto.Message): + r"""Forward SSH Tunnel connectivity. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hostname (str): + Required. Hostname for the SSH tunnel. 
+ username (str): + Required. Username for the SSH tunnel. + port (int): + Port for the SSH tunnel, default value is 22. + password (str): + Input only. SSH password. + + This field is a member of `oneof`_ ``authentication_method``. + private_key (str): + Input only. SSH private key. + + This field is a member of `oneof`_ ``authentication_method``. + """ + + hostname: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + port: int = proto.Field( + proto.INT32, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=100, + oneof='authentication_method', + ) + private_key: str = proto.Field( + proto.STRING, + number=101, + oneof='authentication_method', + ) + + +class StaticServiceIpConnectivity(proto.Message): + r"""Static IP address connectivity configured on service project. + """ + + +class PrivateConnectivity(proto.Message): + r"""Private Connectivity. + + Attributes: + private_connection (str): + Required. The resource name (URI) of the + private connection. + """ + + private_connection: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatabaseType(proto.Message): + r"""A message defining the database engine and provider. + + Attributes: + provider (google.cloud.clouddms_v1.types.DatabaseProvider): + The database provider. + engine (google.cloud.clouddms_v1.types.DatabaseEngine): + The database engine. + """ + + provider: 'DatabaseProvider' = proto.Field( + proto.ENUM, + number=1, + enum='DatabaseProvider', + ) + engine: 'DatabaseEngine' = proto.Field( + proto.ENUM, + number=2, + enum='DatabaseEngine', + ) + + +class MigrationJob(proto.Message): + r"""Represents a Database Migration Service migration job object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name (URI) of this migration job + resource, in the form of: + projects/{project}/locations/{location}/migrationJobs/{migrationJob}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the migration + job resource was created. A timestamp in RFC3339 + UTC "Zulu" format, accurate to nanoseconds. + Example: "2014-10-02T15:01:23.045123456Z". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the migration + job resource was last updated. A timestamp in + RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". + labels (MutableMapping[str, str]): + The resource labels for migration job to use to annotate any + related underlying resources such as Compute Engine VMs. An + object containing a list of "key": "value" pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The migration job display name. + state (google.cloud.clouddms_v1.types.MigrationJob.State): + The current migration job state. + phase (google.cloud.clouddms_v1.types.MigrationJob.Phase): + Output only. The current migration job phase. + type_ (google.cloud.clouddms_v1.types.MigrationJob.Type): + Required. The migration job type. + dump_path (str): + The path to the dump file in Google Cloud Storage, in the + format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). This field and + the "dump_flags" field are mutually exclusive. + dump_flags (google.cloud.clouddms_v1.types.MigrationJob.DumpFlags): + The initial dump flags. This field and the "dump_path" field + are mutually exclusive. + source (str): + Required. The resource name (URI) of the + source connection profile. + destination (str): + Required. The resource name (URI) of the + destination connection profile. 
+ reverse_ssh_connectivity (google.cloud.clouddms_v1.types.ReverseSshConnectivity): + The details needed to communicate to the + source over Reverse SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + vpc_peering_connectivity (google.cloud.clouddms_v1.types.VpcPeeringConnectivity): + The details of the VPC network that the + source database is located in. + + This field is a member of `oneof`_ ``connectivity``. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + duration (google.protobuf.duration_pb2.Duration): + Output only. The duration of the migration + job (in seconds). A duration in seconds with up + to nine fractional digits, terminated by 's'. + Example: "3.5s". + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + source_database (google.cloud.clouddms_v1.types.DatabaseType): + The database engine type and provider of the + source. + destination_database (google.cloud.clouddms_v1.types.DatabaseType): + The database engine type and provider of the + destination. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the migration job is + completed, the time when it was completed. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspaceInfo): + The conversion workspace used by the + migration. + filter (str): + This field can be used to select the entities + to migrate as part of the migration job. It uses + AIP-160 notation to select a subset of the + entities configured on the associated + conversion-workspace. This field should not be + set on migration-jobs that are not associated + with a conversion workspace. + cmek_key_name (str): + The CMEK (customer-managed encryption key) fully qualified + key name used for the migration job. 
This field supports all + migration jobs types except for: + + - Mysql to Mysql (use the cmek field in the cloudsql + connection profile instead). + - PostrgeSQL to PostgreSQL (use the cmek field in the + cloudsql connection profile instead). + - PostgreSQL to AlloyDB (use the kms_key_name field in the + alloydb connection profile instead). Each Cloud CMEK key + has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + class State(proto.Enum): + r"""The current migration job states. + + Values: + STATE_UNSPECIFIED (0): + The state of the migration job is unknown. + MAINTENANCE (1): + The migration job is down for maintenance. + DRAFT (2): + The migration job is in draft mode and no + resources are created. + CREATING (3): + The migration job is being created. + NOT_STARTED (4): + The migration job is created and not started. + RUNNING (5): + The migration job is running. + FAILED (6): + The migration job failed. + COMPLETED (7): + The migration job has been completed. + DELETING (8): + The migration job is being deleted. + STOPPING (9): + The migration job is being stopped. + STOPPED (10): + The migration job is currently stopped. + DELETED (11): + The migration job has been deleted. + UPDATING (12): + The migration job is being updated. + STARTING (13): + The migration job is starting. + RESTARTING (14): + The migration job is restarting. + RESUMING (15): + The migration job is resuming. + """ + STATE_UNSPECIFIED = 0 + MAINTENANCE = 1 + DRAFT = 2 + CREATING = 3 + NOT_STARTED = 4 + RUNNING = 5 + FAILED = 6 + COMPLETED = 7 + DELETING = 8 + STOPPING = 9 + STOPPED = 10 + DELETED = 11 + UPDATING = 12 + STARTING = 13 + RESTARTING = 14 + RESUMING = 15 + + class Phase(proto.Enum): + r"""The current migration job phase. + + Values: + PHASE_UNSPECIFIED (0): + The phase of the migration job is unknown. + FULL_DUMP (1): + The migration job is in the full dump phase. + CDC (2): + The migration job is CDC phase. 
+ PROMOTE_IN_PROGRESS (3): + The migration job is running the promote + phase. + WAITING_FOR_SOURCE_WRITES_TO_STOP (4): + Only RDS flow - waiting for source writes to + stop + PREPARING_THE_DUMP (5): + Only RDS flow - the sources writes stopped, + waiting for dump to begin + """ + PHASE_UNSPECIFIED = 0 + FULL_DUMP = 1 + CDC = 2 + PROMOTE_IN_PROGRESS = 3 + WAITING_FOR_SOURCE_WRITES_TO_STOP = 4 + PREPARING_THE_DUMP = 5 + + class Type(proto.Enum): + r"""The type of migration job (one-time or continuous). + + Values: + TYPE_UNSPECIFIED (0): + The type of the migration job is unknown. + ONE_TIME (1): + The migration job is a one time migration. + CONTINUOUS (2): + The migration job is a continuous migration. + """ + TYPE_UNSPECIFIED = 0 + ONE_TIME = 1 + CONTINUOUS = 2 + + class DumpFlag(proto.Message): + r"""Dump flag definition. + + Attributes: + name (str): + The name of the flag + value (str): + The value of the flag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class DumpFlags(proto.Message): + r"""Dump flags definition. + + Attributes: + dump_flags (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob.DumpFlag]): + The flags for the initial dump. 
+ """ + + dump_flags: MutableSequence['MigrationJob.DumpFlag'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MigrationJob.DumpFlag', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + phase: Phase = proto.Field( + proto.ENUM, + number=7, + enum=Phase, + ) + type_: Type = proto.Field( + proto.ENUM, + number=8, + enum=Type, + ) + dump_path: str = proto.Field( + proto.STRING, + number=9, + ) + dump_flags: DumpFlags = proto.Field( + proto.MESSAGE, + number=17, + message=DumpFlags, + ) + source: str = proto.Field( + proto.STRING, + number=10, + ) + destination: str = proto.Field( + proto.STRING, + number=11, + ) + reverse_ssh_connectivity: 'ReverseSshConnectivity' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connectivity', + message='ReverseSshConnectivity', + ) + vpc_peering_connectivity: 'VpcPeeringConnectivity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connectivity', + message='VpcPeeringConnectivity', + ) + static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( + proto.MESSAGE, + number=103, + oneof='connectivity', + message='StaticIpConnectivity', + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=13, + message=status_pb2.Status, + ) + source_database: 'DatabaseType' = proto.Field( + proto.MESSAGE, + number=14, + message='DatabaseType', + ) + destination_database: 'DatabaseType' = 
proto.Field( + proto.MESSAGE, + number=15, + message='DatabaseType', + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + conversion_workspace: 'ConversionWorkspaceInfo' = proto.Field( + proto.MESSAGE, + number=18, + message='ConversionWorkspaceInfo', + ) + filter: str = proto.Field( + proto.STRING, + number=20, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=21, + ) + + +class ConversionWorkspaceInfo(proto.Message): + r"""A conversion workspace's version. + + Attributes: + name (str): + The resource name (URI) of the conversion + workspace. + commit_id (str): + The commit ID of the conversion workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConnectionProfile(proto.Message): + r"""A connection profile definition. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of this connection profile resource + in the form of + projects/{project}/locations/{location}/connectionProfiles/{connectionProfile}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. A timestamp in RFC3339 UTC "Zulu" + format, accurate to nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was last updated. A timestamp in RFC3339 UTC + "Zulu" format, accurate to nanoseconds. Example: + "2014-10-02T15:01:23.045123456Z". 
+ labels (MutableMapping[str, str]): + The resource labels for connection profile to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + state (google.cloud.clouddms_v1.types.ConnectionProfile.State): + The current connection profile state (e.g. + DRAFT, READY, or FAILED). + display_name (str): + The connection profile display name. + mysql (google.cloud.clouddms_v1.types.MySqlConnectionProfile): + A MySQL database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + postgresql (google.cloud.clouddms_v1.types.PostgreSqlConnectionProfile): + A PostgreSQL database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + oracle (google.cloud.clouddms_v1.types.OracleConnectionProfile): + An Oracle database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + cloudsql (google.cloud.clouddms_v1.types.CloudSqlConnectionProfile): + A CloudSQL database connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + alloydb (google.cloud.clouddms_v1.types.AlloyDbConnectionProfile): + An AlloyDB cluster connection profile. + + This field is a member of `oneof`_ ``connection_profile``. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + provider (google.cloud.clouddms_v1.types.DatabaseProvider): + The database provider. + """ + class State(proto.Enum): + r"""The current connection profile state (e.g. DRAFT, READY, or + FAILED). + + Values: + STATE_UNSPECIFIED (0): + The state of the connection profile is + unknown. + DRAFT (1): + The connection profile is in draft mode and + fully editable. + CREATING (2): + The connection profile is being created. + READY (3): + The connection profile is ready. + UPDATING (4): + The connection profile is being updated. 
+ DELETING (5): + The connection profile is being deleted. + DELETED (6): + The connection profile has been deleted. + FAILED (7): + The last action on the connection profile + failed. + """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + CREATING = 2 + READY = 3 + UPDATING = 4 + DELETING = 5 + DELETED = 6 + FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + mysql: 'MySqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connection_profile', + message='MySqlConnectionProfile', + ) + postgresql: 'PostgreSqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=101, + oneof='connection_profile', + message='PostgreSqlConnectionProfile', + ) + oracle: 'OracleConnectionProfile' = proto.Field( + proto.MESSAGE, + number=104, + oneof='connection_profile', + message='OracleConnectionProfile', + ) + cloudsql: 'CloudSqlConnectionProfile' = proto.Field( + proto.MESSAGE, + number=102, + oneof='connection_profile', + message='CloudSqlConnectionProfile', + ) + alloydb: 'AlloyDbConnectionProfile' = proto.Field( + proto.MESSAGE, + number=105, + oneof='connection_profile', + message='AlloyDbConnectionProfile', + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + provider: 'DatabaseProvider' = proto.Field( + proto.ENUM, + number=8, + enum='DatabaseProvider', + ) + + +class MigrationJobVerificationError(proto.Message): + r"""Error message of a verification Migration job. 
+ + Attributes: + error_code (google.cloud.clouddms_v1.types.MigrationJobVerificationError.ErrorCode): + Output only. An instance of ErrorCode + specifying the error that occurred. + error_message (str): + Output only. A formatted message with further + details about the error and a CTA. + error_detail_message (str): + Output only. A specific detailed error + message, if supplied by the engine. + """ + class ErrorCode(proto.Enum): + r"""A general error code describing the type of error that + occurred. + + Values: + ERROR_CODE_UNSPECIFIED (0): + An unknown error occurred + CONNECTION_FAILURE (1): + We failed to connect to one of the connection + profile. + AUTHENTICATION_FAILURE (2): + We failed to authenticate to one of the + connection profile. + INVALID_CONNECTION_PROFILE_CONFIG (3): + One of the involved connection profiles has + an invalid configuration. + VERSION_INCOMPATIBILITY (4): + The versions of the source and the + destination are incompatible. + CONNECTION_PROFILE_TYPES_INCOMPATIBILITY (5): + The types of the source and the destination + are incompatible. + NO_PGLOGICAL_INSTALLED (7): + No pglogical extension installed on + databases, applicable for postgres. + PGLOGICAL_NODE_ALREADY_EXISTS (8): + pglogical node already exists on databases, + applicable for postgres. + INVALID_WAL_LEVEL (9): + The value of parameter wal_level is not set to logical. + INVALID_SHARED_PRELOAD_LIBRARY (10): + The value of parameter shared_preload_libraries does not + include pglogical. + INSUFFICIENT_MAX_REPLICATION_SLOTS (11): + The value of parameter max_replication_slots is not + sufficient. + INSUFFICIENT_MAX_WAL_SENDERS (12): + The value of parameter max_wal_senders is not sufficient. + INSUFFICIENT_MAX_WORKER_PROCESSES (13): + The value of parameter max_worker_processes is not + sufficient. + UNSUPPORTED_EXTENSIONS (14): + Extensions installed are either not supported + or having unsupported versions. + UNSUPPORTED_MIGRATION_TYPE (15): + Unsupported migration type. 
+            INVALID_RDS_LOGICAL_REPLICATION (16):
+                Invalid RDS logical replication.
+            UNSUPPORTED_GTID_MODE (17):
+                The gtid_mode is not supported, applicable for MySQL.
+            UNSUPPORTED_TABLE_DEFINITION (18):
+                The table definition is not supported due to
+                missing primary key or replica identity.
+            UNSUPPORTED_DEFINER (19):
+                The definer is not supported.
+            CANT_RESTART_RUNNING_MIGRATION (21):
+                Migration is already running at the time of
+                restart request.
+            TABLES_WITH_LIMITED_SUPPORT (24):
+                The source has tables with limited support.
+                E.g. PostgreSQL tables without primary keys.
+            UNSUPPORTED_DATABASE_LOCALE (25):
+                The source uses an unsupported locale.
+            UNSUPPORTED_DATABASE_FDW_CONFIG (26):
+                The source uses an unsupported Foreign Data
+                Wrapper configuration.
+            ERROR_RDBMS (27):
+                There was an underlying RDBMS error.
+            SOURCE_SIZE_EXCEEDS_THRESHOLD (28):
+                The source DB size in Bytes exceeds a certain
+                threshold. The migration might require an
+                increase of quota, or might not be supported.
+ """ + ERROR_CODE_UNSPECIFIED = 0 + CONNECTION_FAILURE = 1 + AUTHENTICATION_FAILURE = 2 + INVALID_CONNECTION_PROFILE_CONFIG = 3 + VERSION_INCOMPATIBILITY = 4 + CONNECTION_PROFILE_TYPES_INCOMPATIBILITY = 5 + NO_PGLOGICAL_INSTALLED = 7 + PGLOGICAL_NODE_ALREADY_EXISTS = 8 + INVALID_WAL_LEVEL = 9 + INVALID_SHARED_PRELOAD_LIBRARY = 10 + INSUFFICIENT_MAX_REPLICATION_SLOTS = 11 + INSUFFICIENT_MAX_WAL_SENDERS = 12 + INSUFFICIENT_MAX_WORKER_PROCESSES = 13 + UNSUPPORTED_EXTENSIONS = 14 + UNSUPPORTED_MIGRATION_TYPE = 15 + INVALID_RDS_LOGICAL_REPLICATION = 16 + UNSUPPORTED_GTID_MODE = 17 + UNSUPPORTED_TABLE_DEFINITION = 18 + UNSUPPORTED_DEFINER = 19 + CANT_RESTART_RUNNING_MIGRATION = 21 + TABLES_WITH_LIMITED_SUPPORT = 24 + UNSUPPORTED_DATABASE_LOCALE = 25 + UNSUPPORTED_DATABASE_FDW_CONFIG = 26 + ERROR_RDBMS = 27 + SOURCE_SIZE_EXCEEDS_THRESHOLD = 28 + + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=1, + enum=ErrorCode, + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + error_detail_message: str = proto.Field( + proto.STRING, + number=3, + ) + + +class PrivateConnection(proto.Message): + r"""The PrivateConnection resource is used to establish private + connectivity with the customer's network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of the resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of the resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update time of the + resource. + labels (MutableMapping[str, str]): + The resource labels for private connections to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The private connection display name. 
+ state (google.cloud.clouddms_v1.types.PrivateConnection.State): + Output only. The state of the private + connection. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + vpc_peering_config (google.cloud.clouddms_v1.types.VpcPeeringConfig): + VPC peering configuration. + + This field is a member of `oneof`_ ``connectivity``. + """ + class State(proto.Enum): + r"""Private Connection state. + + Values: + STATE_UNSPECIFIED (0): + No description available. + CREATING (1): + The private connection is in creation state - + creating resources. + CREATED (2): + The private connection has been created with + all of its resources. + FAILED (3): + The private connection creation has failed. + DELETING (4): + The private connection is being deleted. + FAILED_TO_DELETE (5): + Delete request has failed, resource is in + invalid state. + DELETED (6): + The private connection has been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + CREATED = 2 + FAILED = 3 + DELETING = 4 + FAILED_TO_DELETE = 5 + DELETED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + vpc_peering_config: 'VpcPeeringConfig' = proto.Field( + proto.MESSAGE, + number=100, + oneof='connectivity', + message='VpcPeeringConfig', + ) + + +class VpcPeeringConfig(proto.Message): + r"""The VPC peering configuration is used to create VPC peering + with the consumer's VPC. 
+ + Attributes: + vpc_name (str): + Required. Fully qualified name of the VPC + that Database Migration Service will peer to. + subnet (str): + Required. A free subnet for peering. (CIDR of + /29) + """ + + vpc_name: str = proto.Field( + proto.STRING, + number=1, + ) + subnet: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py new file mode 100644 index 0000000..760ba47 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py @@ -0,0 +1,1221 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.clouddms_v1.types import clouddms_resources +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.clouddms.v1', + manifest={ + 'DatabaseEntityType', + 'BackgroundJobType', + 'ImportRulesFileFormat', + 'DatabaseEngineInfo', + 'ConversionWorkspace', + 'BackgroundJobLogEntry', + 'DatabaseEntity', + 'SchemaEntity', + 'TableEntity', + 'ColumnEntity', + 'ConstraintEntity', + 'IndexEntity', + 'TriggerEntity', + 'ViewEntity', + 'SequenceEntity', + 'StoredProcedureEntity', + 'FunctionEntity', + 'SynonymEntity', + 'PackageEntity', + 'EntityMapping', + 'EntityMappingLogEntry', + }, +) + + +class DatabaseEntityType(proto.Enum): + r"""The type of database entities supported, + + Values: + DATABASE_ENTITY_TYPE_UNSPECIFIED (0): + Unspecified database entity type. + DATABASE_ENTITY_TYPE_SCHEMA (1): + Schema. + DATABASE_ENTITY_TYPE_TABLE (2): + Table. + DATABASE_ENTITY_TYPE_COLUMN (3): + Column. + DATABASE_ENTITY_TYPE_CONSTRAINT (4): + Constraint. + DATABASE_ENTITY_TYPE_INDEX (5): + Index. + DATABASE_ENTITY_TYPE_TRIGGER (6): + Trigger. + DATABASE_ENTITY_TYPE_VIEW (7): + View. + DATABASE_ENTITY_TYPE_SEQUENCE (8): + Sequence. + DATABASE_ENTITY_TYPE_STORED_PROCEDURE (9): + Stored Procedure. + DATABASE_ENTITY_TYPE_FUNCTION (10): + Function. + DATABASE_ENTITY_TYPE_SYNONYM (11): + Synonym. + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE (12): + Package. + DATABASE_ENTITY_TYPE_UDT (13): + UDT. + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW (14): + Materialized View. + DATABASE_ENTITY_TYPE_DATABASE (15): + Database. 
+ """ + DATABASE_ENTITY_TYPE_UNSPECIFIED = 0 + DATABASE_ENTITY_TYPE_SCHEMA = 1 + DATABASE_ENTITY_TYPE_TABLE = 2 + DATABASE_ENTITY_TYPE_COLUMN = 3 + DATABASE_ENTITY_TYPE_CONSTRAINT = 4 + DATABASE_ENTITY_TYPE_INDEX = 5 + DATABASE_ENTITY_TYPE_TRIGGER = 6 + DATABASE_ENTITY_TYPE_VIEW = 7 + DATABASE_ENTITY_TYPE_SEQUENCE = 8 + DATABASE_ENTITY_TYPE_STORED_PROCEDURE = 9 + DATABASE_ENTITY_TYPE_FUNCTION = 10 + DATABASE_ENTITY_TYPE_SYNONYM = 11 + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE = 12 + DATABASE_ENTITY_TYPE_UDT = 13 + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW = 14 + DATABASE_ENTITY_TYPE_DATABASE = 15 + + +class BackgroundJobType(proto.Enum): + r"""The types of jobs that can be executed in the background. + + Values: + BACKGROUND_JOB_TYPE_UNSPECIFIED (0): + Unspecified background job type. + BACKGROUND_JOB_TYPE_SOURCE_SEED (1): + Job to seed from the source database. + BACKGROUND_JOB_TYPE_CONVERT (2): + Job to convert the source database into a + draft of the destination database. + BACKGROUND_JOB_TYPE_APPLY_DESTINATION (3): + Job to apply the draft tree onto the + destination. + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE (5): + Job to import and convert mapping rules from + an external source such as an ora2pg config + file. + """ + BACKGROUND_JOB_TYPE_UNSPECIFIED = 0 + BACKGROUND_JOB_TYPE_SOURCE_SEED = 1 + BACKGROUND_JOB_TYPE_CONVERT = 2 + BACKGROUND_JOB_TYPE_APPLY_DESTINATION = 3 + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE = 5 + + +class ImportRulesFileFormat(proto.Enum): + r"""The format for the import rules file. + + Values: + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED (0): + Unspecified rules format. + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE (1): + HarbourBridge session file. + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE (2): + Ora2Pg configuration file. 
+ """ + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED = 0 + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE = 1 + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE = 2 + + +class DatabaseEngineInfo(proto.Message): + r"""The type and version of a source or destination database. + + Attributes: + engine (google.cloud.clouddms_v1.types.DatabaseEngine): + Required. Engine type. + version (str): + Required. Engine named version, for example + 12.c.1. + """ + + engine: clouddms_resources.DatabaseEngine = proto.Field( + proto.ENUM, + number=1, + enum=clouddms_resources.DatabaseEngine, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConversionWorkspace(proto.Message): + r"""The main conversion workspace resource entity. + + Attributes: + name (str): + Full name of the workspace resource, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + source (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The source engine details. + destination (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The destination engine details. + global_settings (MutableMapping[str, str]): + A generic list of settings for the workspace. The settings + are database pair dependant and can indicate default + behavior for the mapping rules engine or turn on or off + specific features. Such examples can be: + convert_foreign_key_to_interleave=true, skip_triggers=false, + ignore_non_table_synonyms=true + has_uncommitted_changes (bool): + Output only. Whether the workspace has + uncommitted changes (changes which were made + after the workspace was committed). + latest_commit_id (str): + Output only. The latest commit ID. + latest_commit_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + was committed. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was last updated. + display_name (str): + The display name for the workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source: 'DatabaseEngineInfo' = proto.Field( + proto.MESSAGE, + number=2, + message='DatabaseEngineInfo', + ) + destination: 'DatabaseEngineInfo' = proto.Field( + proto.MESSAGE, + number=3, + message='DatabaseEngineInfo', + ) + global_settings: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + has_uncommitted_changes: bool = proto.Field( + proto.BOOL, + number=5, + ) + latest_commit_id: str = proto.Field( + proto.STRING, + number=6, + ) + latest_commit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackgroundJobLogEntry(proto.Message): + r"""Execution log of a background job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + The background job log entry ID. + job_type (google.cloud.clouddms_v1.types.BackgroundJobType): + The type of job that was executed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + started. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + finished. 
+ completion_state (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.JobCompletionState): + Job completion state, i.e. the final state + after the job completed. + completion_comment (str): + Job completion comment, such as how many + entities were seeded, how many warnings were + found during conversion, and similar + information. + request_autocommit (bool): + Whether the client requested the conversion + workspace to be committed after a successful + completion of the job. + seed_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.SeedJobDetails): + Seed job details. + + This field is a member of `oneof`_ ``job_details``. + import_rules_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ImportRulesJobDetails): + Import rules job details. + + This field is a member of `oneof`_ ``job_details``. + convert_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ConvertJobDetails): + Convert job details. + + This field is a member of `oneof`_ ``job_details``. + apply_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ApplyJobDetails): + Apply job details. + + This field is a member of `oneof`_ ``job_details``. + """ + class JobCompletionState(proto.Enum): + r"""Final state after a job completes. + + Values: + JOB_COMPLETION_STATE_UNSPECIFIED (0): + The status is not specified. This state is + used when job is not yet finished. + SUCCEEDED (1): + Success. + FAILED (2): + Error. + """ + JOB_COMPLETION_STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + class SeedJobDetails(proto.Message): + r"""Details regarding a Seed background job. + + Attributes: + connection_profile (str): + The connection profile which was used for the + seed job. + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + + class ImportRulesJobDetails(proto.Message): + r"""Details regarding an Import Rules background job. + + Attributes: + files (MutableSequence[str]): + File names used for the import rules job. 
+ file_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + The requested file format. + """ + + files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + file_format: 'ImportRulesFileFormat' = proto.Field( + proto.ENUM, + number=2, + enum='ImportRulesFileFormat', + ) + + class ConvertJobDetails(proto.Message): + r"""Details regarding a Convert background job. + + Attributes: + filter (str): + AIP-160 based filter used to specify the + entities to convert + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + + class ApplyJobDetails(proto.Message): + r"""Details regarding an Apply background job. + + Attributes: + connection_profile (str): + The connection profile which was used for the + apply job. + filter (str): + AIP-160 based filter used to specify the + entities to apply + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + job_type: 'BackgroundJobType' = proto.Field( + proto.ENUM, + number=2, + enum='BackgroundJobType', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + completion_state: JobCompletionState = proto.Field( + proto.ENUM, + number=5, + enum=JobCompletionState, + ) + completion_comment: str = proto.Field( + proto.STRING, + number=6, + ) + request_autocommit: bool = proto.Field( + proto.BOOL, + number=7, + ) + seed_job_details: SeedJobDetails = proto.Field( + proto.MESSAGE, + number=100, + oneof='job_details', + message=SeedJobDetails, + ) + import_rules_job_details: ImportRulesJobDetails = proto.Field( + proto.MESSAGE, + number=101, + oneof='job_details', + message=ImportRulesJobDetails, + ) + convert_job_details: ConvertJobDetails = proto.Field( + 
proto.MESSAGE, + number=102, + oneof='job_details', + message=ConvertJobDetails, + ) + apply_job_details: ApplyJobDetails = proto.Field( + proto.MESSAGE, + number=103, + oneof='job_details', + message=ApplyJobDetails, + ) + + +class DatabaseEntity(proto.Message): + r"""The base entity type for all the database related entities. + The message contains the entity name, the name of its parent, + the entity type, and the specific details per entity type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + short_name (str): + The short name (e.g. table name) of the + entity. + parent_entity (str): + The full name of the parent entity (e.g. + schema name). + tree (google.cloud.clouddms_v1.types.DatabaseEntity.TreeType): + The type of tree the entity belongs to. + entity_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The type of the database entity (table, view, + index, ...). + mappings (MutableSequence[google.cloud.clouddms_v1.types.EntityMapping]): + Details about entity mappings. + For source tree entities, this holds the draft + entities which were generated by the mapping + rules. + For draft tree entities, this holds the source + entities which were converted to form the draft + entity. + Destination entities will have no mapping + details. + schema (google.cloud.clouddms_v1.types.SchemaEntity): + Schema. + + This field is a member of `oneof`_ ``entity_body``. + table (google.cloud.clouddms_v1.types.TableEntity): + Table. + + This field is a member of `oneof`_ ``entity_body``. + view (google.cloud.clouddms_v1.types.ViewEntity): + View. + + This field is a member of `oneof`_ ``entity_body``. + sequence (google.cloud.clouddms_v1.types.SequenceEntity): + Sequence. 
+ + This field is a member of `oneof`_ ``entity_body``. + stored_procedure (google.cloud.clouddms_v1.types.StoredProcedureEntity): + Stored procedure. + + This field is a member of `oneof`_ ``entity_body``. + database_function (google.cloud.clouddms_v1.types.FunctionEntity): + Function. + + This field is a member of `oneof`_ ``entity_body``. + synonym (google.cloud.clouddms_v1.types.SynonymEntity): + Synonym. + + This field is a member of `oneof`_ ``entity_body``. + database_package (google.cloud.clouddms_v1.types.PackageEntity): + Package. + + This field is a member of `oneof`_ ``entity_body``. + """ + class TreeType(proto.Enum): + r"""The type of database entities tree. + + Values: + TREE_TYPE_UNSPECIFIED (0): + Tree type unspecified. + SOURCE (1): + Tree of entities loaded from a source + database. + DRAFT (2): + Tree of entities converted from the source + tree using the mapping rules. + DESTINATION (3): + Tree of entities observed on the destination + database. + """ + TREE_TYPE_UNSPECIFIED = 0 + SOURCE = 1 + DRAFT = 2 + DESTINATION = 3 + + short_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent_entity: str = proto.Field( + proto.STRING, + number=2, + ) + tree: TreeType = proto.Field( + proto.ENUM, + number=3, + enum=TreeType, + ) + entity_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + mappings: MutableSequence['EntityMapping'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='EntityMapping', + ) + schema: 'SchemaEntity' = proto.Field( + proto.MESSAGE, + number=102, + oneof='entity_body', + message='SchemaEntity', + ) + table: 'TableEntity' = proto.Field( + proto.MESSAGE, + number=103, + oneof='entity_body', + message='TableEntity', + ) + view: 'ViewEntity' = proto.Field( + proto.MESSAGE, + number=104, + oneof='entity_body', + message='ViewEntity', + ) + sequence: 'SequenceEntity' = proto.Field( + proto.MESSAGE, + number=105, + oneof='entity_body', + message='SequenceEntity', 
+ ) + stored_procedure: 'StoredProcedureEntity' = proto.Field( + proto.MESSAGE, + number=106, + oneof='entity_body', + message='StoredProcedureEntity', + ) + database_function: 'FunctionEntity' = proto.Field( + proto.MESSAGE, + number=107, + oneof='entity_body', + message='FunctionEntity', + ) + synonym: 'SynonymEntity' = proto.Field( + proto.MESSAGE, + number=108, + oneof='entity_body', + message='SynonymEntity', + ) + database_package: 'PackageEntity' = proto.Field( + proto.MESSAGE, + number=109, + oneof='entity_body', + message='PackageEntity', + ) + + +class SchemaEntity(proto.Message): + r"""Schema typically has no parent entity, but can have a parent + entity DatabaseInstance (for database engines which support it). + For some database engines, the terms schema and user can be + used interchangeably when they refer to a namespace or a + collection of other database entities. Can store additional + information which is schema specific. + + Attributes: + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class TableEntity(proto.Message): + r"""Table's parent is a schema. + + Attributes: + columns (MutableSequence[google.cloud.clouddms_v1.types.ColumnEntity]): + Table columns. + constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]): + Table constraints. + indices (MutableSequence[google.cloud.clouddms_v1.types.IndexEntity]): + Table indices. + triggers (MutableSequence[google.cloud.clouddms_v1.types.TriggerEntity]): + Table triggers. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + comment (str): + Comment associated with the table. 
+ """ + + columns: MutableSequence['ColumnEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ColumnEntity', + ) + constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ConstraintEntity', + ) + indices: MutableSequence['IndexEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='IndexEntity', + ) + triggers: MutableSequence['TriggerEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='TriggerEntity', + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + comment: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ColumnEntity(proto.Message): + r"""Column is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + Column name. + data_type (str): + Column data type. + charset (str): + Charset override - instead of table level + charset. + collation (str): + Collation override - instead of table level + collation. + length (int): + Column length - e.g. varchar (50). + precision (int): + Column precision - when relevant. + scale (int): + Column scale - when relevant. + fractional_seconds_precision (int): + Column fractional second precision - used for + timestamp based datatypes. + array (bool): + Is the column of array type. + array_length (int): + If the column is array, of which length. + nullable (bool): + Is the column nullable. + auto_generated (bool): + Is the column auto-generated/identity. + udt (bool): + Is the column a UDT. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + set_values (MutableSequence[str]): + Specifies the list of values allowed in the + column. Only used for set data type. + comment (str): + Comment associated with the column. + ordinal_position (int): + Column order in the table. + default_value (str): + Default value of the column. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_type: str = proto.Field( + proto.STRING, + number=2, + ) + charset: str = proto.Field( + proto.STRING, + number=3, + ) + collation: str = proto.Field( + proto.STRING, + number=4, + ) + length: int = proto.Field( + proto.INT64, + number=5, + ) + precision: int = proto.Field( + proto.INT32, + number=6, + ) + scale: int = proto.Field( + proto.INT32, + number=7, + ) + fractional_seconds_precision: int = proto.Field( + proto.INT32, + number=8, + ) + array: bool = proto.Field( + proto.BOOL, + number=9, + ) + array_length: int = proto.Field( + proto.INT32, + number=10, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=11, + ) + auto_generated: bool = proto.Field( + proto.BOOL, + number=12, + ) + udt: bool = proto.Field( + proto.BOOL, + number=13, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=14, + message=struct_pb2.Struct, + ) + set_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + comment: str = proto.Field( + proto.STRING, + number=16, + ) + ordinal_position: int = proto.Field( + proto.INT32, + number=17, + ) + default_value: str = proto.Field( + proto.STRING, + number=18, + ) + + +class ConstraintEntity(proto.Message): + r"""Constraint is not used as an independent entity, it is + retrieved as part of another entity such as Table or View. + + Attributes: + name (str): + The name of the table constraint. + type_ (str): + Type of constraint, for example unique, + primary key, foreign key (currently only primary + key is supported). + table_columns (MutableSequence[str]): + Table columns used as part of the Constraint, + for example primary key constraint should list + the columns which constitutes the key. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + reference_columns (MutableSequence[str]): + Reference columns which may be associated with the + constraint. 
For example, if the constraint is a FOREIGN_KEY, + this represents the list of full names of referenced columns + by the foreign key. + reference_table (str): + Reference table which may be associated with the constraint. + For example, if the constraint is a FOREIGN_KEY, this + represents the list of full name of the referenced table by + the foreign key. + table_name (str): + Table which is associated with the constraint. In case the + constraint is defined on a table, this field is left empty + as this information is stored in parent_name. However, if + constraint is defined on a view, this field stores the table + name on which the view is defined. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + reference_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + reference_table: str = proto.Field( + proto.STRING, + number=6, + ) + table_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class IndexEntity(proto.Message): + r"""Index is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the index. + type_ (str): + Type of index, for example B-TREE. + table_columns (MutableSequence[str]): + Table columns used as part of the Index, for + example B-TREE index should list the columns + which constitutes the index. + unique (bool): + Boolean value indicating whether the index is + unique. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + unique: bool = proto.Field( + proto.BOOL, + number=4, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +class TriggerEntity(proto.Message): + r"""Trigger is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the trigger. + triggering_events (MutableSequence[str]): + The DML, DDL, or database events that fire + the trigger, for example INSERT, UPDATE. + trigger_type (str): + Indicates when the trigger fires, for example + BEFORE STATEMENT, AFTER EACH ROW. + sql_code (str): + The SQL code which creates the trigger. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + triggering_events: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + trigger_type: str = proto.Field( + proto.STRING, + number=3, + ) + sql_code: str = proto.Field( + proto.STRING, + number=4, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +class ViewEntity(proto.Message): + r"""View's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the view. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]): + View constraints. 
+ """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ConstraintEntity', + ) + + +class SequenceEntity(proto.Message): + r"""Sequence's parent is a schema. + + Attributes: + increment (int): + Increment value for the sequence. + start_value (bytes): + Start number for the sequence represented as + bytes to accommodate large. numbers + max_value (bytes): + Maximum number for the sequence represented + as bytes to accommodate large. numbers + min_value (bytes): + Minimum number for the sequence represented + as bytes to accommodate large. numbers + cycle (bool): + Indicates whether the sequence value should + cycle through. + cache (int): + Indicates number of entries to cache / + precreate. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + increment: int = proto.Field( + proto.INT64, + number=1, + ) + start_value: bytes = proto.Field( + proto.BYTES, + number=2, + ) + max_value: bytes = proto.Field( + proto.BYTES, + number=3, + ) + min_value: bytes = proto.Field( + proto.BYTES, + number=4, + ) + cycle: bool = proto.Field( + proto.BOOL, + number=5, + ) + cache: int = proto.Field( + proto.INT64, + number=6, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Struct, + ) + + +class StoredProcedureEntity(proto.Message): + r"""Stored procedure's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the stored + procedure. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
+ """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class FunctionEntity(proto.Message): + r"""Function's parent is a schema. + + Attributes: + sql_code (str): + The SQL code which creates the function. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class SynonymEntity(proto.Message): + r"""Synonym's parent is a schema. + + Attributes: + source_entity (str): + The name of the entity for which the synonym + is being created (the source). + source_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The type of the entity for which the synonym + is being created (usually a table or a + sequence). + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + source_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=2, + enum='DatabaseEntityType', + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class PackageEntity(proto.Message): + r"""Package's parent is a schema. + + Attributes: + package_sql_code (str): + The SQL code which creates the package. + package_body (str): + The SQL code which creates the package body. + If the package specification has cursors or + subprograms, then the package body is mandatory. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
+ """ + + package_sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + package_body: str = proto.Field( + proto.STRING, + number=2, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class EntityMapping(proto.Message): + r"""Details of the mappings of a database entity. + + Attributes: + source_entity (str): + Source entity full name. + The source entity can also be a column, index or + constraint using the same naming notation + schema.table.column. + draft_entity (str): + Target entity full name. + The draft entity can also include a column, + index or constraint using the same naming + notation schema.table.column. + source_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of source entity. + draft_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of draft entity. + mapping_log (MutableSequence[google.cloud.clouddms_v1.types.EntityMappingLogEntry]): + Entity mapping log entries. + Multiple rules can be effective and contribute + changes to a converted entity, such as a rule + can handle the entity name, another rule can + handle an entity type. In addition, rules which + did not change the entity are also logged along + with the reason preventing them to do so. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + draft_entity: str = proto.Field( + proto.STRING, + number=2, + ) + source_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=4, + enum='DatabaseEntityType', + ) + draft_type: 'DatabaseEntityType' = proto.Field( + proto.ENUM, + number=5, + enum='DatabaseEntityType', + ) + mapping_log: MutableSequence['EntityMappingLogEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='EntityMappingLogEntry', + ) + + +class EntityMappingLogEntry(proto.Message): + r"""A single record of a rule which was used for a mapping. + + Attributes: + rule_id (str): + Which rule caused this log entry. 
+ rule_revision_id (str): + Rule revision ID. + mapping_comment (str): + Comment. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + rule_revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + mapping_comment: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 0000000..574c5ae --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 0000000..ee175f1 --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

# Python versions the unit-test and mypy sessions run under.
ALL_PYTHON = [
    "3.7",
    "3.8",
    "3.9",
    "3.10",
    "3.11",
]

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# `setup.py --name` prints the distribution name followed by a newline;
# strip it so PACKAGE_NAME can be passed verbatim as a CLI argument below.
PACKAGE_NAME = subprocess.check_output(
    [sys.executable, "setup.py", "--name"], encoding="utf-8"
).strip()

BLACK_VERSION = "black==22.3.0"
BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.11"

# Sessions run when `nox` is invoked without an explicit -s/--session flag.
# BUG FIX: the generated list was missing a comma after "check_lower_bounds",
# so Python's implicit string-literal concatenation (which skips the comment
# line) merged it with "docs" into the single bogus entry
# "check_lower_boundsdocs", silently dropping both sessions from the default
# run.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
    "blacken",
    "lint",
    "lint_setup_py",
]


@nox.session(python=ALL_PYTHON)
def unit(session):
    """Run the unit test suite with coverage under each supported Python."""
    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/clouddms_v1/',
        '--cov=tests/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=ALL_PYTHON)
def mypy(session):
    """Run the type checker over the generated package."""
    session.install(
        'mypy',
        'types-requests',
        'types-protobuf'
    )
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in the constraints file."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
    """Build the docs for this library."""
    session.install("-e", ".")
    session.install("sphinx==4.0.1", "alabaster", "recommonmark")

    # Remove any stale build output before rebuilding.
    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
    """Run linters.

    Returns a failure if the linters find linting errors or sufficiently
    serious code quality issues.
    """
    session.install("flake8", BLACK_VERSION)
    session.run(
        "black",
        "--check",
        *BLACK_PATHS,
    )
    session.run("flake8", "google", "tests", "samples")


@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
    """Run black. Format code to uniform standard."""
    session.install(BLACK_VERSION)
    session.run(
        "black",
        *BLACK_PATHS,
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
    """Verify that setup.py is valid (including RST check)."""
    session.install("docutils", "pygments")
    session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-dms


# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import clouddms_v1


async def sample_apply_conversion_workspace():
    """Apply a conversion workspace and await the long-running operation."""
    # Create a client.
    client = clouddms_v1.DataMigrationServiceAsyncClient()

    # Build the request; the field values below are placeholders.
    request = clouddms_v1.ApplyConversionWorkspaceRequest(
        connection_profile="connection_profile_value",
        name="name_value",
    )

    # Kick off the long-running operation.
    operation = client.apply_conversion_workspace(request=request)

    print("Waiting for operation to complete...")

    # Await the operation future, then read its terminal result.
    lro = await operation
    response = lro.result()

    # Handle the response.
    print(response)

# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async]
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ApplyConversionWorkspace
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-dms


# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import clouddms_v1


def sample_apply_conversion_workspace():
    """Apply a conversion workspace and block until the operation finishes."""
    # Create a client.
    client = clouddms_v1.DataMigrationServiceClient()

    # Build the request; the field values below are placeholders.
    request = clouddms_v1.ApplyConversionWorkspaceRequest(
        connection_profile="connection_profile_value",
        name="name_value",
    )

    # Kick off the long-running operation.
    lro = client.apply_conversion_workspace(request=request)

    print("Waiting for operation to complete...")

    # Block for the terminal result.
    response = lro.result()

    # Handle the response.
    print(response)

# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync]
#
# Snippet for CommitConversionWorkspace
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-dms


# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import clouddms_v1


async def sample_commit_conversion_workspace():
    """Commit a conversion workspace and await the long-running operation."""
    # Create a client.
    client = clouddms_v1.DataMigrationServiceAsyncClient()

    # Only the workspace resource name is required to commit.
    request = clouddms_v1.CommitConversionWorkspaceRequest(
        name="name_value",
    )

    # Kick off the long-running operation.
    operation = client.commit_conversion_workspace(request=request)

    print("Waiting for operation to complete...")

    # Await the operation future, then read its terminal result.
    lro = await operation
    response = lro.result()

    # Handle the response.
    print(response)

# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async]
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py new file mode 100644 index 0000000..2fbb94d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
#
# Snippet for ConvertConversionWorkspace
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-dms


# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import clouddms_v1


async def sample_convert_conversion_workspace():
    """Convert a conversion workspace and await the long-running operation."""
    # Create a client.
    client = clouddms_v1.DataMigrationServiceAsyncClient()

    # Every request field is optional here, so start from an empty request.
    request = clouddms_v1.ConvertConversionWorkspaceRequest()

    # Kick off the long-running operation.
    operation = client.convert_conversion_workspace(request=request)

    print("Waiting for operation to complete...")

    # Await the operation future, then read its terminal result.
    lro = await operation
    response = lro.result()

    # Handle the response.
    print(response)

# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async]
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ConvertConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py new file mode 100644 index 0000000..e7615c7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py new file mode 100644 index 0000000..bf8cd78 --- /dev/null +++ 
b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.CreateConnectionProfileRequest( + parent="parent_value", + connection_profile_id="connection_profile_id_value", + connection_profile=connection_profile, + ) + + # Make the request + operation = client.create_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py new file mode 100644 index 0000000..d8a0466 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py new file mode 100644 index 0000000..0288f42 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py new file mode 100644 index 0000000..cdeffd2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py new file mode 100644 index 0000000..a7a40ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.CreateMigrationJobRequest( + parent="parent_value", + migration_job_id="migration_job_id_value", + migration_job=migration_job, + ) + + # Make the request + operation = client.create_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py new file mode 100644 index 0000000..eb459b7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py new file mode 100644 index 0000000..f1b9e9f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py new file mode 100644 index 0000000..e9c7703 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py new file mode 100644 index 0000000..4c05c38 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConnectionProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py new file mode 100644 index 0000000..1472d44 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py new file mode 100644 index 0000000..0404763 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py new file mode 100644 index 0000000..15e2bb1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py new file mode 100644 index 0000000..f00b4de --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteMigrationJobRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py new file mode 100644 index 0000000..adaa51c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py new file mode 100644 index 0000000..841c724 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeletePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py new file mode 100644 index 0000000..e4cd078 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py new file mode 100644 index 0000000..1952791 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py new file mode 100644 index 0000000..61ced4f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeDatabaseEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py new file mode 100644 index 0000000..25821ef --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DescribeDatabaseEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py new file mode 100644 index 0000000..a1b178b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchStaticIps +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py new file mode 100644 index 0000000..563e9f1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchStaticIps +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py new file mode 100644 index 0000000..8195933 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateSshScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = await client.generate_ssh_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py new file mode 100644 index 0000000..4ddf07a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateSshScript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_generate_ssh_script(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + vm_creation_config = clouddms_v1.VmCreationConfig() + vm_creation_config.vm_machine_type = "vm_machine_type_value" + + request = clouddms_v1.GenerateSshScriptRequest( + vm_creation_config=vm_creation_config, + vm="vm_value", + ) + + # Make the request + response = client.generate_ssh_script(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py new file mode 100644 index 0000000..8204812 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection_profile(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py new file mode 100644 index 0000000..abc1e54 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConnectionProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection_profile(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py new file mode 100644 index 0000000..d15aa5c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py new file mode 100644 index 0000000..0ba92c8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py new file mode 100644 index 0000000..e9513b0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_job(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py new file mode 100644 index 0000000..f9c5c2f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetMigrationJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_job(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py new file mode 100644 index 0000000..fb0a8b3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py new file mode 100644 index 0000000..eacb889 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py new file mode 100644 index 0000000..8234d57 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py new file mode 100644 index 0000000..b5cbb03 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py new file mode 100644 index 0000000..48cde5c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnectionProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py new file mode 100644 index 0000000..e2439ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnectionProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_connection_profiles(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConnectionProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connection_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py new file mode 100644 index 0000000..aa6d6b2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py new file mode 100644 index 0000000..e9e5c74 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py new file mode 100644 index 0000000..35d5950 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py new file mode 100644 index 0000000..de9611f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_migration_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListMigrationJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py new file mode 100644 index 0000000..0149e98 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py new file mode 100644 index 0000000..5eec2f6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py new file mode 100644 index 0000000..a7e0d5b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PromoteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py new file mode 100644 index 0000000..1972401 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PromoteMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_promote_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.PromoteMigrationJobRequest( + ) + + # Make the request + operation = client.promote_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py new file mode 100644 index 0000000..22db451 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py new file mode 100644 index 0000000..5f8ab15 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_restart_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RestartMigrationJobRequest( + ) + + # Make the request + operation = client.restart_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py new file mode 100644 index 0000000..c59e5dd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ResumeMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py new file mode 100644 index 0000000..75d40af --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ResumeMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_resume_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ResumeMigrationJobRequest( + ) + + # Make the request + operation = client.resume_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py new file mode 100644 index 0000000..c9e922c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py new file mode 100644 index 0000000..e8952f3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py new file mode 100644 index 0000000..74794d4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py new file mode 100644 index 0000000..4ab3279 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py new file mode 100644 index 0000000..46bdf92 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py new file mode 100644 index 0000000..647c3d2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py new file mode 100644 index 0000000..bfe21ee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py new file mode 100644 index 0000000..b73ede5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_start_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StartMigrationJobRequest( + ) + + # Make the request + operation = client.start_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py new file mode 100644 index 0000000..b6352ee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py new file mode 100644 index 0000000..9c6fdc4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_stop_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.StopMigrationJobRequest( + ) + + # Make the request + operation = client.stop_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py new file mode 100644 index 0000000..4776efa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py new file mode 100644 index 0000000..68cae51 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnectionProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_connection_profile(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + connection_profile = clouddms_v1.ConnectionProfile() + connection_profile.mysql.host = "host_value" + connection_profile.mysql.port = 453 + connection_profile.mysql.username = "username_value" + connection_profile.mysql.password = "password_value" + + request = clouddms_v1.UpdateConnectionProfileRequest( + connection_profile=connection_profile, + ) + + # Make the request + operation = client.update_connection_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py new file mode 100644 index 0000000..3361ce9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py new file mode 100644 index 0000000..2a1ba33 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py new file mode 100644 index 0000000..76363dc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py new file mode 100644 index 0000000..35eee8d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + migration_job = clouddms_v1.MigrationJob() + migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" + migration_job.reverse_ssh_connectivity.vm_port = 775 + migration_job.type_ = "CONTINUOUS" + migration_job.source = "source_value" + migration_job.destination = "destination_value" + + request = clouddms_v1.UpdateMigrationJobRequest( + migration_job=migration_job, + ) + + # Make the request + operation = client.update_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py new file mode 100644 index 0000000..daee5a7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for VerifyMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py new file mode 100644 index 0000000..901a5f7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for VerifyMigrationJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_verify_migration_job(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.VerifyMigrationJobRequest( + ) + + # Make the request + operation = client.verify_migration_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json new file mode 100644 index 0000000..10d2ba4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -0,0 +1,5771 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.clouddms.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dms", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.apply_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ApplyConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "apply_conversion_workspace" + }, + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.apply_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", + "service": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ApplyConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "apply_conversion_workspace" + }, + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.commit_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CommitConversionWorkspace" + }, + "parameters": [ + { + "name": "request", 
+ "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "commit_conversion_workspace" + }, + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.commit_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CommitConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "commit_conversion_workspace" + }, + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.convert_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ConvertConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "convert_conversion_workspace" + }, 
+ "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.convert_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ConvertConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "convert_conversion_workspace" + }, + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_connection_profile" + }, + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_connection_profile" + }, + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, 
+ "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager", + 
"shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": 
"datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + }, + { + "canonical": 
true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_conversion_workspace", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": 
"DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + 
"file": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": 
"DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", + "method": { + 
"fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListMigrationJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", + "shortName": "list_migration_jobs" + }, + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": 
"DataMigrationService" + }, + "shortName": "ListMigrationJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", + "shortName": "list_migration_jobs" + }, + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_private_connections", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListPrivateConnections" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager", + "shortName": "list_private_connections" + }, + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_private_connections", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListPrivateConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager", + "shortName": "list_private_connections" + }, + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "PromoteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_migration_job" + }, + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "PromoteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_migration_job" + }, + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RestartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_migration_job" + }, + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RestartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_migration_job" + }, + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ResumeMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_migration_job" + }, + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ResumeMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_migration_job" + }, + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.rollback_conversion_workspace", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "RollbackConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "rollback_conversion_workspace" + }, + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.rollback_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": 
"DataMigrationService" + }, + "shortName": "RollbackConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "rollback_conversion_workspace" + }, + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.search_background_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SearchBackgroundJobs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" + }, + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.search_background_jobs", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SearchBackgroundJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" + }, + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.seed_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SeedConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "seed_conversion_workspace" + }, + "description": "Sample for 
SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.seed_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "SeedConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "seed_conversion_workspace" + }, + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.start_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_migration_job" + }, + "description": "Sample for StartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.start_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StartMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_migration_job" + }, + "description": "Sample for StartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_start_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_start_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.stop_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StopMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_migration_job" + }, + "description": "Sample for StopMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": 
"DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.stop_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "StopMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "stop_migration_job" + }, + "description": "Sample for StopMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connection_profile" + }, + "description": "Sample for UpdateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connection_profile" + }, + "description": "Sample for UpdateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_migration_job" + }, + "description": "Sample for UpdateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + 
}, + "shortName": "UpdateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_migration_job" + }, + "description": "Sample for UpdateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.verify_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "VerifyMigrationJob" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "verify_migration_job" + }, + "description": "Sample for VerifyMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.verify_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "VerifyMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "verify_migration_job" + }, + "description": "Sample for VerifyMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py new file mode 100644 index 0000000..ad6ad15 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py @@ -0,0 +1,211 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class clouddmsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'apply_conversion_workspace': ('name', 'filter', 'connection_profile', ), + 'commit_conversion_workspace': ('name', 'commit_name', ), + 'convert_conversion_workspace': ('name', 'auto_commit', 'filter', ), + 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'create_conversion_workspace': ('parent', 'conversion_workspace_id', 'conversion_workspace', 'request_id', ), + 'create_migration_job': ('parent', 'migration_job_id', 'migration_job', 'request_id', ), + 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', 'skip_validation', ), + 'delete_connection_profile': ('name', 'request_id', 'force', ), + 'delete_conversion_workspace': ('name', 'request_id', ), + 'delete_migration_job': ('name', 'request_id', 'force', ), + 'delete_private_connection': ('name', 'request_id', ), + 'describe_conversion_workspace_revisions': ('conversion_workspace', 'commit_id', ), + 'describe_database_entities': ('conversion_workspace', 'page_size', 'page_token', 'tree', 'uncommitted', 'commit_id', 'filter', ), + 'fetch_static_ips': ('name', 'page_size', 'page_token', ), + 'generate_ssh_script': ('vm', 'migration_job', 'vm_creation_config', 'vm_selection_config', 'vm_port', ), + 'get_connection_profile': ('name', ), + 'get_conversion_workspace': 
('name', ), + 'get_migration_job': ('name', ), + 'get_private_connection': ('name', ), + 'import_mapping_rules': ('parent', 'rules_format', 'rules_files', 'auto_commit', ), + 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_conversion_workspaces': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_migration_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'promote_migration_job': ('name', ), + 'restart_migration_job': ('name', ), + 'resume_migration_job': ('name', ), + 'rollback_conversion_workspace': ('name', ), + 'search_background_jobs': ('conversion_workspace', 'return_most_recent_per_job_type', 'max_size', 'completed_until_time', ), + 'seed_conversion_workspace': ('name', 'auto_commit', 'source_connection_profile', 'destination_connection_profile', ), + 'start_migration_job': ('name', ), + 'stop_migration_job': ('name', ), + 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'update_conversion_workspace': ('update_mask', 'conversion_workspace', 'request_id', ), + 'update_migration_job': ('update_mask', 'migration_job', 'request_id', ), + 'verify_migration_job': ('name', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=clouddmsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the clouddms client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 0000000..2501d9d --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dms' + + +description = "Google Cloud Dms API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/clouddms/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/python-dms" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + 
classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000..ad3f0fa --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000..ad3f0fa --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000..ad3f0fa --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000..2beecf9 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000..ad3f0fa --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000..ad3f0fa --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py new file mode 100644 index 0000000..d1d3635 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -0,0 +1,10874 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceAsyncClient +from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceClient +from google.cloud.clouddms_v1.services.data_migration_service import pagers +from google.cloud.clouddms_v1.services.data_migration_service import transports +from google.cloud.clouddms_v1.types import clouddms +from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import conversionworkspace_resources +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from 
google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataMigrationServiceClient._get_default_mtls_endpoint(None) is None + assert DataMigrationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataMigrationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataMigrationServiceClient, "grpc"), + (DataMigrationServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_migration_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = 
{"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datamigration.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataMigrationServiceGrpcTransport, "grpc"), + (transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_migration_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataMigrationServiceClient, "grpc"), + (DataMigrationServiceAsyncClient, "grpc_asyncio"), +]) +def test_data_migration_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
'datamigration.googleapis.com:443' + ) + + +def test_data_migration_service_client_get_transport_class(): + transport = DataMigrationServiceClient.get_transport_class() + available_transports = [ + transports.DataMigrationServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DataMigrationServiceClient.get_transport_class("grpc") + assert transport == transports.DataMigrationServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) +def test_data_migration_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", "true"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataMigrationServiceClient, 
transports.DataMigrationServiceGrpcTransport, "grpc", "false"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_migration_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataMigrationServiceClient, DataMigrationServiceAsyncClient +]) +@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) +@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) +def test_data_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_migration_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_migration_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_migration_service_client_client_options_from_dict(): + with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataMigrationServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), + (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_migration_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListMigrationJobsRequest, + dict, +]) +def test_list_migration_jobs(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty 
request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListMigrationJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_migration_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + client.list_migration_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListMigrationJobsRequest() + +@pytest.mark.asyncio +async def test_list_migration_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListMigrationJobsRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListMigrationJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_from_dict(): + await test_list_migration_jobs_async(request_type=dict) + + +def test_list_migration_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMigrationJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value = clouddms.ListMigrationJobsResponse() + client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_migration_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListMigrationJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) + await client.list_migration_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_migration_jobs_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_migration_jobs_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_jobs( + clouddms.ListMigrationJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_migration_jobs_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListMigrationJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_migration_jobs_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_migration_jobs( + clouddms.ListMigrationJobsRequest(), + parent='parent_value', + ) + + +def test_list_migration_jobs_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_migration_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.MigrationJob) + for i in results) +def test_list_migration_jobs_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clouddms_resources.MigrationJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_migration_jobs_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + next_page_token='abc', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[], + next_page_token='def', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + ], + next_page_token='ghi', + ), + clouddms.ListMigrationJobsResponse( + migration_jobs=[ + clouddms_resources.MigrationJob(), + clouddms_resources.MigrationJob(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_migration_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.GetMigrationJobRequest, + dict, +]) +def test_get_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms_resources.MigrationJob( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.MigrationJob.State.MAINTENANCE, + phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, + type_=clouddms_resources.MigrationJob.Type.ONE_TIME, + dump_path='dump_path_value', + source='source_value', + destination='destination_value', + filter='filter_value', + cmek_key_name='cmek_key_name_value', + reverse_ssh_connectivity=clouddms_resources.ReverseSshConnectivity(vm_ip='vm_ip_value'), + ) + response = client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.MigrationJob) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE + assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP + assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME + assert response.dump_path == 'dump_path_value' + assert response.source == 'source_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.cmek_key_name == 'cmek_key_name_value' + + +def test_get_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + client.get_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetMigrationJobRequest() + +@pytest.mark.asyncio +async def test_get_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.MigrationJob.State.MAINTENANCE, + phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, + type_=clouddms_resources.MigrationJob.Type.ONE_TIME, + dump_path='dump_path_value', + source='source_value', + destination='destination_value', + filter='filter_value', + cmek_key_name='cmek_key_name_value', + )) + response = await client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetMigrationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms_resources.MigrationJob) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE + assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP + assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME + assert response.dump_path == 'dump_path_value' + assert response.source == 'source_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.cmek_key_name == 'cmek_key_name_value' + + +@pytest.mark.asyncio +async def test_get_migration_job_async_from_dict(): + await test_get_migration_job_async(request_type=dict) + + +def test_get_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value = clouddms_resources.MigrationJob() + client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.GetMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) + await client.get_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.MigrationJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_migration_job( + clouddms.GetMigrationJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.MigrationJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_migration_job( + clouddms.GetMigrationJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateMigrationJobRequest, + dict, +]) +def test_create_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + client.create_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateMigrationJobRequest() + +@pytest.mark.asyncio +async def test_create_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_migration_job_async_from_dict(): + await test_create_migration_job_async(request_type=dict) + + +def test_create_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateMigrationJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CreateMigrationJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_migration_job( + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].migration_job_id + mock_val = 'migration_job_id_value' + assert arg == mock_val + + +def test_create_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_migration_job( + clouddms.CreateMigrationJobRequest(), + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + +@pytest.mark.asyncio +async def test_create_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_migration_job( + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].migration_job_id + mock_val = 'migration_job_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_migration_job( + clouddms.CreateMigrationJobRequest(), + parent='parent_value', + migration_job=clouddms_resources.MigrationJob(name='name_value'), + migration_job_id='migration_job_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.UpdateMigrationJobRequest, + dict, +]) +def test_update_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateMigrationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + client.update_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateMigrationJobRequest() + +@pytest.mark.asyncio +async def test_update_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateMigrationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_migration_job_async_from_dict(): + await test_update_migration_job_async(request_type=dict) + + +def test_update_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateMigrationJobRequest() + + request.migration_job.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateMigrationJobRequest() + + request.migration_job.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_migration_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job.name=name_value', + ) in kw['metadata'] + + +def test_update_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_migration_job( + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_migration_job( + clouddms.UpdateMigrationJobRequest(), + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_migration_job( + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].migration_job + mock_val = clouddms_resources.MigrationJob(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_migration_job( + clouddms.UpdateMigrationJobRequest(), + migration_job=clouddms_resources.MigrationJob(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteMigrationJobRequest, + dict, +]) +def test_delete_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + client.delete_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteMigrationJobRequest() + +@pytest.mark.asyncio +async def test_delete_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_migration_job_async_from_dict(): + await test_delete_migration_job_async(request_type=dict) + + +def test_delete_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_migration_job_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_migration_job_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_migration_job( + clouddms.DeleteMigrationJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_migration_job_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_migration_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_migration_job_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_migration_job( + clouddms.DeleteMigrationJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.StartMigrationJobRequest, + dict, +]) +def test_start_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StartMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + client.start_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StartMigrationJobRequest() + +@pytest.mark.asyncio +async def test_start_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StartMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StartMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_migration_job_async_from_dict(): + await test_start_migration_job_async(request_type=dict) + + +def test_start_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.StartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_start_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.start_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.StopMigrationJobRequest, + dict, +]) +def test_stop_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StopMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + client.stop_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StopMigrationJobRequest() + +@pytest.mark.asyncio +async def test_stop_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StopMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.StopMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_migration_job_async_from_dict(): + await test_stop_migration_job_async(request_type=dict) + + +def test_stop_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StopMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_stop_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.StopMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stop_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.stop_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ResumeMigrationJobRequest, + dict, +]) +def test_resume_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ResumeMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_resume_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + client.resume_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ResumeMigrationJobRequest() + +@pytest.mark.asyncio +async def test_resume_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.ResumeMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ResumeMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_resume_migration_job_async_from_dict(): + await test_resume_migration_job_async(request_type=dict) + + +def test_resume_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ResumeMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_resume_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ResumeMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.resume_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.PromoteMigrationJobRequest, + dict, +]) +def test_promote_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.PromoteMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_promote_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + client.promote_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.PromoteMigrationJobRequest() + +@pytest.mark.asyncio +async def test_promote_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.PromoteMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.PromoteMigrationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_promote_migration_job_async_from_dict(): + await test_promote_migration_job_async(request_type=dict) + + +def test_promote_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.PromoteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_promote_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.PromoteMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.promote_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.promote_migration_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.VerifyMigrationJobRequest, + dict, +]) +def test_verify_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.VerifyMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_verify_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + client.verify_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.VerifyMigrationJobRequest() + +@pytest.mark.asyncio +async def test_verify_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.VerifyMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.VerifyMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_verify_migration_job_async_from_dict(): + await test_verify_migration_job_async(request_type=dict) + + +def test_verify_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.VerifyMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_verify_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.VerifyMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.verify_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.RestartMigrationJobRequest, + dict, +]) +def test_restart_migration_job(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RestartMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restart_migration_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + client.restart_migration_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RestartMigrationJobRequest() + +@pytest.mark.asyncio +async def test_restart_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.RestartMigrationJobRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RestartMigrationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restart_migration_job_async_from_dict(): + await test_restart_migration_job_async(request_type=dict) + + +def test_restart_migration_job_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RestartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restart_migration_job_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.RestartMigrationJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restart_migration_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restart_migration_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.GenerateSshScriptRequest, + dict, +]) +def test_generate_ssh_script(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.SshScript( + script='script_value', + ) + response = client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GenerateSshScriptRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.SshScript) + assert response.script == 'script_value' + + +def test_generate_ssh_script_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + client.generate_ssh_script() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GenerateSshScriptRequest() + +@pytest.mark.asyncio +async def test_generate_ssh_script_async(transport: str = 'grpc_asyncio', request_type=clouddms.GenerateSshScriptRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript( + script='script_value', + )) + response = await client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GenerateSshScriptRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.SshScript) + assert response.script == 'script_value' + + +@pytest.mark.asyncio +async def test_generate_ssh_script_async_from_dict(): + await test_generate_ssh_script_async(request_type=dict) + + +def test_generate_ssh_script_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GenerateSshScriptRequest() + + request.migration_job = 'migration_job_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + call.return_value = clouddms.SshScript() + client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_ssh_script_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GenerateSshScriptRequest() + + request.migration_job = 'migration_job_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_ssh_script), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript()) + await client.generate_ssh_script(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'migration_job=migration_job_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListConnectionProfilesRequest, + dict, +]) +def test_list_connection_profiles(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConnectionProfilesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConnectionProfilesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionProfilesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_connection_profiles_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + client.list_connection_profiles() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConnectionProfilesRequest() + +@pytest.mark.asyncio +async def test_list_connection_profiles_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConnectionProfilesRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConnectionProfilesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConnectionProfilesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_connection_profiles_async_from_dict(): + await test_list_connection_profiles_async(request_type=dict) + + +def test_list_connection_profiles_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConnectionProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value = clouddms.ListConnectionProfilesResponse() + client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_connection_profiles_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConnectionProfilesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) + await client.list_connection_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_connection_profiles_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConnectionProfilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connection_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_connection_profiles_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_connection_profiles( + clouddms.ListConnectionProfilesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_connection_profiles_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connection_profiles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConnectionProfilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connection_profiles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_connection_profiles_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_connection_profiles( + clouddms.ListConnectionProfilesRequest(), + parent='parent_value', + ) + + +def test_list_connection_profiles_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_connection_profiles),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='abc',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[],
+                next_page_token='def',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='ghi',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        # The pager must carry the routing metadata for the (empty) parent.
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_connection_profiles(request={})
+
+        assert pager._metadata == metadata
+
+        # Iterating the pager flattens all pages into one result stream.
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, clouddms_resources.ConnectionProfile)
+                   for i in results)
+def test_list_connection_profiles_pages(transport_name: str = "grpc"):
+    client = DataMigrationServiceClient(
+        # NOTE(review): instantiate AnonymousCredentials() rather than passing
+        # the class object, matching every other test in this file.
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connection_profiles),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='abc',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[],
+                next_page_token='def',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='ghi',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+            ),
+            RuntimeError,
+        )
+        # Each page exposes the raw response; verify the token chain.
+        pages = list(client.list_connection_profiles(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_connection_profiles_async_pager():
+    client = DataMigrationServiceAsyncClient(
+        # NOTE(review): instantiate AnonymousCredentials() rather than passing
+        # the class object, matching every other test in this file.
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connection_profiles),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='abc',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[],
+                next_page_token='def',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='ghi',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_connection_profiles(request={},)
+        assert async_pager.next_page_token == 'abc'
+        # Async iteration flattens all pages into one result stream.
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, clouddms_resources.ConnectionProfile)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_connection_profiles_async_pages():
+    client = DataMigrationServiceAsyncClient(
+        # NOTE(review): instantiate AnonymousCredentials() rather than passing
+        # the class object, matching every other test in this file.
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connection_profiles),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='abc',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[],
+                next_page_token='def',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                ],
+                next_page_token='ghi',
+            ),
+            clouddms.ListConnectionProfilesResponse(
+                connection_profiles=[
+                    clouddms_resources.ConnectionProfile(),
+                    clouddms_resources.ConnectionProfile(),
+                ],
+            ),
+            RuntimeError,
+        )
+        # Collect the raw pages asynchronously and verify the token chain.
+        pages = []
+        async for page_ in (await client.list_connection_profiles(request={})).pages: # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+  clouddms.GetConnectionProfileRequest,
+  dict,
+])
+def test_get_connection_profile(request_type, transport: str = 'grpc'):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): the `mysql` oneof field is populated here but never
+        # asserted below — presumably only scalar fields are compared; confirm
+        # against the generator template before relying on it.
+        call.return_value = clouddms_resources.ConnectionProfile(
+            name='name_value',
+            state=clouddms_resources.ConnectionProfile.State.DRAFT,
+            display_name='display_name_value',
+            provider=clouddms_resources.DatabaseProvider.CLOUDSQL,
+            mysql=clouddms_resources.MySqlConnectionProfile(host='host_value'),
+        )
+        response = client.get_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.GetConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, clouddms_resources.ConnectionProfile)
+    assert response.name == 'name_value'
+    assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT
+    assert response.display_name == 'display_name_value'
+    assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL
+
+
+def test_get_connection_profile_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        client.get_connection_profile()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.GetConnectionProfileRequest()
+
+@pytest.mark.asyncio
+async def test_get_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConnectionProfileRequest):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile(
+            name='name_value',
+            state=clouddms_resources.ConnectionProfile.State.DRAFT,
+            display_name='display_name_value',
+            provider=clouddms_resources.DatabaseProvider.CLOUDSQL,
+        ))
+        response = await client.get_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.GetConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, clouddms_resources.ConnectionProfile)
+    assert response.name == 'name_value'
+    assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT
+    assert response.display_name == 'display_name_value'
+    assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL
+
+
+@pytest.mark.asyncio
+async def test_get_connection_profile_async_from_dict():
+    # Re-run the async test with a plain dict in place of the request proto.
+    await test_get_connection_profile_async(request_type=dict)
+
+
+def test_get_connection_profile_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.GetConnectionProfileRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        call.return_value = clouddms_resources.ConnectionProfile()
+        client.get_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_connection_profile_field_headers_async():
+    # Async variant of the field-header routing check above.
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.GetConnectionProfileRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile())
+        await client.get_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_connection_profile_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = clouddms_resources.ConnectionProfile()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_connection_profile(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_connection_profile_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_connection_profile(
+            clouddms.GetConnectionProfileRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_connection_profile_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (A single assignment suffices: the async transport needs the
+        # awaitable FakeUnaryUnaryCall wrapper.)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_connection_profile(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_connection_profile_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_connection_profile(
+            clouddms.GetConnectionProfileRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  clouddms.CreateConnectionProfileRequest,
+  dict,
+])
+def test_create_connection_profile(request_type, transport: str = 'grpc'):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.CreateConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect: the mocked
+    # Operation is surfaced to the caller as a long-running-operation future.
+    assert isinstance(response, future.Future)
+
+
+def test_create_connection_profile_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        client.create_connection_profile()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.CreateConnectionProfileRequest()
+
+@pytest.mark.asyncio
+async def test_create_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConnectionProfileRequest):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.create_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.CreateConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_connection_profile_async_from_dict():
+    # Re-run the async test with a plain dict in place of the request proto.
+    await test_create_connection_profile_async(request_type=dict)
+
+
+def test_create_connection_profile_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.CreateConnectionProfileRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.create_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_connection_profile_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.CreateConnectionProfileRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_connection_profile_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_connection_profile(
+            parent='parent_value',
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            connection_profile_id='connection_profile_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].connection_profile
+        mock_val = clouddms_resources.ConnectionProfile(name='name_value')
+        assert arg == mock_val
+        arg = args[0].connection_profile_id
+        mock_val = 'connection_profile_id_value'
+        assert arg == mock_val
+
+
+def test_create_connection_profile_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_connection_profile(
+            clouddms.CreateConnectionProfileRequest(),
+            parent='parent_value',
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            connection_profile_id='connection_profile_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_connection_profile_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (A single assignment suffices: the async transport needs the
+        # awaitable FakeUnaryUnaryCall wrapper.)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_connection_profile(
+            parent='parent_value',
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            connection_profile_id='connection_profile_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].connection_profile
+        mock_val = clouddms_resources.ConnectionProfile(name='name_value')
+        assert arg == mock_val
+        arg = args[0].connection_profile_id
+        mock_val = 'connection_profile_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_connection_profile_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_connection_profile(
+            clouddms.CreateConnectionProfileRequest(),
+            parent='parent_value',
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            connection_profile_id='connection_profile_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  clouddms.UpdateConnectionProfileRequest,
+  dict,
+])
+def test_update_connection_profile(request_type, transport: str = 'grpc'):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.UpdateConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect: the mocked
+    # Operation is surfaced to the caller as a long-running-operation future.
+    assert isinstance(response, future.Future)
+
+
+def test_update_connection_profile_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        client.update_connection_profile()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.UpdateConnectionProfileRequest()
+
+@pytest.mark.asyncio
+async def test_update_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConnectionProfileRequest):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.update_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.UpdateConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_update_connection_profile_async_from_dict():
+    # Re-run the async test with a plain dict in place of the request proto.
+    await test_update_connection_profile_async(request_type=dict)
+
+
+def test_update_connection_profile_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.UpdateConnectionProfileRequest()
+
+    # The routing key for Update is the nested resource name.
+    request.connection_profile.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.update_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'connection_profile.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_connection_profile_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.UpdateConnectionProfileRequest()
+
+    request.connection_profile.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.update_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'connection_profile.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_connection_profile_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_connection_profile(
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].connection_profile
+        mock_val = clouddms_resources.ConnectionProfile(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_connection_profile_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_connection_profile(
+            clouddms.UpdateConnectionProfileRequest(),
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_connection_profile_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (A single assignment suffices: the async transport needs the
+        # awaitable FakeUnaryUnaryCall wrapper.)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_connection_profile(
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].connection_profile
+        mock_val = clouddms_resources.ConnectionProfile(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_connection_profile_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_connection_profile(
+            clouddms.UpdateConnectionProfileRequest(),
+            connection_profile=clouddms_resources.ConnectionProfile(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  clouddms.DeleteConnectionProfileRequest,
+  dict,
+])
+def test_delete_connection_profile(request_type, transport: str = 'grpc'):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.DeleteConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect: the mocked
+    # Operation is surfaced to the caller as a long-running-operation future.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_connection_profile_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_connection_profile),
+            '__call__') as call:
+        client.delete_connection_profile()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.DeleteConnectionProfileRequest()
+
+@pytest.mark.asyncio
+async def test_delete_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConnectionProfileRequest):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_connection_profile),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.delete_connection_profile(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.DeleteConnectionProfileRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_delete_connection_profile_async_from_dict():
+    # Re-run the async test with a plain dict in place of the request proto.
+    await test_delete_connection_profile_async(request_type=dict)
+
+
+def test_delete_connection_profile_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clouddms.DeleteConnectionProfileRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_connection_profile_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConnectionProfileRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_connection_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_connection_profile_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_connection_profile_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection_profile( + clouddms.DeleteConnectionProfileRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_connection_profile_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection_profile), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection_profile( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_connection_profile_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection_profile( + clouddms.DeleteConnectionProfileRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.CreatePrivateConnectionRequest, + dict, +]) +def test_create_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + client.create_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() + +@pytest.mark.asyncio +async def test_create_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreatePrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_private_connection_async_from_dict(): + await test_create_private_connection_async(request_type=dict) + + +def test_create_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_private_connection( + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name='name_value') + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = 'private_connection_id_value' + assert arg == mock_val + + +def test_create_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_private_connection( + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name='name_value') + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = 'private_connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent='parent_value', + private_connection=clouddms_resources.PrivateConnection(name='name_value'), + private_connection_id='private_connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.GetPrivateConnectionRequest, + dict, +]) +def test_get_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.PrivateConnection.State.CREATING, + vpc_peering_config=clouddms_resources.VpcPeeringConfig(vpc_name='vpc_name_value'), + ) + response = client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +def test_get_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + client.get_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + +@pytest.mark.asyncio +async def test_get_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetPrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection( + name='name_value', + display_name='display_name_value', + state=clouddms_resources.PrivateConnection.State.CREATING, + )) + response = await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +@pytest.mark.asyncio +async def test_get_private_connection_async_from_dict(): + await test_get_private_connection_async(request_type=dict) + + +def test_get_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value = clouddms_resources.PrivateConnection() + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListPrivateConnectionsRequest, + dict, +]) +def test_list_private_connections(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_private_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + client.list_private_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + +@pytest.mark.asyncio +async def test_list_private_connections_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListPrivateConnectionsRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_private_connections_async_from_dict(): + await test_list_private_connections_async(request_type=dict) + + +def test_list_private_connections_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value = clouddms.ListPrivateConnectionsResponse() + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_private_connections_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_private_connections_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_private_connections_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_private_connections_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_private_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) + for i in results) +def test_list_private_connections_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_private_connections_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token='abc', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token='def', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token='ghi', + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_private_connections(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.DeletePrivateConnectionRequest, + dict, +]) +def test_delete_private_connection(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + client.delete_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + +@pytest.mark.asyncio +async def test_delete_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeletePrivateConnectionRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_private_connection_async_from_dict(): + await test_delete_private_connection_async(request_type=dict) + + +def test_delete_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_private_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.GetConversionWorkspaceRequest, + dict, +]) +def test_get_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = conversionworkspace_resources.ConversionWorkspace( + name='name_value', + has_uncommitted_changes=True, + latest_commit_id='latest_commit_id_value', + display_name='display_name_value', + ) + response = client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == 'name_value' + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == 'latest_commit_id_value' + assert response.display_name == 'display_name_value' + + +def test_get_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + client.get_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace( + name='name_value', + has_uncommitted_changes=True, + latest_commit_id='latest_commit_id_value', + display_name='display_name_value', + )) + response = await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == 'name_value' + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == 'latest_commit_id_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async_from_dict(): + await test_get_conversion_workspace_async(request_type=dict) + + +def test_get_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value = conversionworkspace_resources.ConversionWorkspace() + client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) + await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.ListConversionWorkspacesRequest, + dict, +]) +def test_list_conversion_workspaces(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_conversion_workspaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + client.list_conversion_workspaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConversionWorkspacesRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConversionWorkspacesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_from_dict(): + await test_list_conversion_workspaces_async(request_type=dict) + + +def test_list_conversion_workspaces_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value = clouddms.ListConversionWorkspacesResponse() + client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) + await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_conversion_workspaces_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_conversion_workspaces( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_conversion_workspaces_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_conversion_workspaces( + clouddms.ListConversionWorkspacesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_conversion_workspaces( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_conversion_workspaces( + clouddms.ListConversionWorkspacesRequest(), + parent='parent_value', + ) + + +def test_list_conversion_workspaces_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_conversion_workspaces(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) + for i in results) +def test_list_conversion_workspaces_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + pages = list(client.list_conversion_workspaces(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_conversion_workspaces(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='abc', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token='def', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token='ghi', + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_conversion_workspaces(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.CreateConversionWorkspaceRequest, + dict, +]) +def test_create_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + client.create_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async_from_dict(): + await test_create_conversion_workspace_async(request_type=dict) + + +def test_create_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_conversion_workspace( + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = 'conversion_workspace_id_value' + assert arg == mock_val + + +def test_create_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_conversion_workspace( + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = 'conversion_workspace_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent='parent_value', + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + conversion_workspace_id='conversion_workspace_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.UpdateConversionWorkspaceRequest, + dict, +]) +def test_update_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + client.update_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async_from_dict(): + await test_update_conversion_workspace_async(request_type=dict) + + +def test_update_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace.name=name_value', + ) in kw['metadata'] + + +def test_update_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.DeleteConversionWorkspaceRequest, + dict, +]) +def test_delete_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + client.delete_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async_from_dict(): + await test_delete_conversion_workspace_async(request_type=dict) + + +def test_delete_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_conversion_workspace( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + clouddms.SeedConversionWorkspaceRequest, + dict, +]) +def test_seed_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_seed_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + client.seed_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.SeedConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async_from_dict(): + await test_seed_conversion_workspace_async(request_type=dict) + + +def test_seed_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ImportMappingRulesRequest, + dict, +]) +def test_import_mapping_rules(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_mapping_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + client.import_mapping_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + +@pytest.mark.asyncio +async def test_import_mapping_rules_async(transport: str = 'grpc_asyncio', request_type=clouddms.ImportMappingRulesRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async_from_dict(): + await test_import_mapping_rules_async(request_type=dict) + + +def test_import_mapping_rules_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_mapping_rules_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ConvertConversionWorkspaceRequest, + dict, +]) +def test_convert_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_convert_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + client.convert_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ConvertConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async_from_dict(): + await test_convert_conversion_workspace_async(request_type=dict) + + +def test_convert_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.CommitConversionWorkspaceRequest, + dict, +]) +def test_commit_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_commit_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + client.commit_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CommitConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async_from_dict(): + await test_commit_conversion_workspace_async(request_type=dict) + + +def test_commit_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CommitConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CommitConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.RollbackConversionWorkspaceRequest, + dict, +]) +def test_rollback_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_rollback_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + client.rollback_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.RollbackConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async_from_dict(): + await test_rollback_conversion_workspace_async(request_type=dict) + + +def test_rollback_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.ApplyConversionWorkspaceRequest, + dict, +]) +def test_apply_conversion_workspace(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + client.apply_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ApplyConversionWorkspaceRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async_from_dict(): + await test_apply_conversion_workspace_async(request_type=dict) + + +def test_apply_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.DescribeDatabaseEntitiesRequest, + dict, +]) +def test_describe_database_entities(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeDatabaseEntitiesResponse( + next_page_token='next_page_token_value', + ) + response = client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_describe_database_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + client.describe_database_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + +@pytest.mark.asyncio +async def test_describe_database_entities_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeDatabaseEntitiesRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse( + next_page_token='next_page_token_value', + )) + response = await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_from_dict(): + await test_describe_database_entities_async(request_type=dict) + + +def test_describe_database_entities_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value.
+ request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value = clouddms.DescribeDatabaseEntitiesResponse() + client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_describe_database_entities_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse()) + await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +def test_describe_database_entities_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('conversion_workspace', ''), + )), + ) + pager = client.describe_database_entities(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in results) +def test_describe_database_entities_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = list(client.describe_database_entities(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + async_pager = await client.describe_database_entities(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in responses) + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='abc', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token='def', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token='ghi', + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.describe_database_entities(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + clouddms.SearchBackgroundJobsRequest, + dict, +]) +def test_search_background_jobs(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.SearchBackgroundJobsResponse( + ) + response = client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +def test_search_background_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + client.search_background_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + +@pytest.mark.asyncio +async def test_search_background_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.SearchBackgroundJobsRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse( + )) + response = await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +@pytest.mark.asyncio +async def test_search_background_jobs_async_from_dict(): + await test_search_background_jobs_async(request_type=dict) + + +def test_search_background_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value = clouddms.SearchBackgroundJobsResponse() + client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_search_background_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_background_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse()) + await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.DescribeConversionWorkspaceRevisionsRequest, + dict, +]) +def test_describe_conversion_workspace_revisions(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse( + ) + response = client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +def test_describe_conversion_workspace_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + client.describe_conversion_workspace_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeConversionWorkspaceRevisionsRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse( + )) + response = await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async_from_dict(): + await test_describe_conversion_workspace_revisions_async(request_type=dict) + + +def test_describe_conversion_workspace_revisions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = 'conversion_workspace_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse()) + await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'conversion_workspace=conversion_workspace_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + clouddms.FetchStaticIpsRequest, + dict, +]) +def test_fetch_static_ips(request_type, transport: str = 'grpc'): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse( + static_ips=['static_ips_value'], + next_page_token='next_page_token_value', + ) + response = client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ['static_ips_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_static_ips_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + client.fetch_static_ips() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + +@pytest.mark.asyncio +async def test_fetch_static_ips_async(transport: str = 'grpc_asyncio', request_type=clouddms.FetchStaticIpsRequest): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse( + static_ips=['static_ips_value'], + next_page_token='next_page_token_value', + )) + response = await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchStaticIpsAsyncPager) + assert response.static_ips == ['static_ips_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_from_dict(): + await test_fetch_static_ips_async(request_type=dict) + + +def test_fetch_static_ips_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value = clouddms.FetchStaticIpsResponse() + client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_static_ips_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) + await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_fetch_static_ips_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_static_ips( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_fetch_static_ips_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.FetchStaticIpsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_static_ips( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name='name_value', + ) + + +def test_fetch_static_ips_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.fetch_static_ips(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_fetch_static_ips_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_static_ips(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_static_ips(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token='def', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token='ghi', + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.fetch_static_ips(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataMigrationServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = DataMigrationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataMigrationServiceGrpcTransport, + ) + +def test_data_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_data_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_migration_jobs', + 'get_migration_job', + 'create_migration_job', + 'update_migration_job', + 'delete_migration_job', + 'start_migration_job', + 'stop_migration_job', + 'resume_migration_job', + 'promote_migration_job', + 'verify_migration_job', + 'restart_migration_job', + 'generate_ssh_script', + 'list_connection_profiles', + 'get_connection_profile', + 'create_connection_profile', + 'update_connection_profile', + 'delete_connection_profile', + 'create_private_connection', + 'get_private_connection', + 'list_private_connections', + 'delete_private_connection', + 'get_conversion_workspace', + 'list_conversion_workspaces', + 'create_conversion_workspace', + 'update_conversion_workspace', + 'delete_conversion_workspace', + 'seed_conversion_workspace', + 'import_mapping_rules', + 'convert_conversion_workspace', + 'commit_conversion_workspace', + 'rollback_conversion_workspace', + 'apply_conversion_workspace', + 'describe_database_entities', + 'search_background_jobs', + 'describe_conversion_workspace_revisions', + 'fetch_static_ips', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_migration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', 
autospec=True) as load_creds, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_data_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport() + adc.assert_called_once() + + +def test_data_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataMigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataMigrationServiceGrpcTransport, grpc_helpers), + (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) +def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_data_migration_service_host_no_port(transport_name):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'datamigration.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_data_migration_service_host_with_port(transport_name):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'datamigration.googleapis.com:8000'
+    )
+
+def test_data_migration_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataMigrationServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_migration_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataMigrationServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport])
+def test_data_migration_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport])
+def test_data_migration_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_data_migration_service_grpc_lro_client():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_migration_service_grpc_lro_async_client():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_connection_profile_path():
+    project = "squid"
+    location = "clam"
+    connection_profile = "whelk"
+    expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, )
+    actual = DataMigrationServiceClient.connection_profile_path(project, location, connection_profile)
+    assert expected == actual
+
+
+def test_parse_connection_profile_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "connection_profile": "nudibranch",
+    }
+    path = DataMigrationServiceClient.connection_profile_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DataMigrationServiceClient.parse_connection_profile_path(path) + assert expected == actual + +def test_conversion_workspace_path(): + project = "cuttlefish" + location = "mussel" + conversion_workspace = "winkle" + expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, ) + actual = DataMigrationServiceClient.conversion_workspace_path(project, location, conversion_workspace) + assert expected == actual + + +def test_parse_conversion_workspace_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "conversion_workspace": "abalone", + } + path = DataMigrationServiceClient.conversion_workspace_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_conversion_workspace_path(path) + assert expected == actual + +def test_migration_job_path(): + project = "squid" + location = "clam" + migration_job = "whelk" + expected = "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, ) + actual = DataMigrationServiceClient.migration_job_path(project, location, migration_job) + assert expected == actual + + +def test_parse_migration_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "migration_job": "nudibranch", + } + path = DataMigrationServiceClient.migration_job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_migration_job_path(path) + assert expected == actual + +def test_networks_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = DataMigrationServiceClient.networks_path(project, network) + assert expected == actual + + +def test_parse_networks_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = DataMigrationServiceClient.networks_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_networks_path(path) + assert expected == actual + +def test_private_connection_path(): + project = "scallop" + location = "abalone" + private_connection = "squid" + expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, ) + actual = DataMigrationServiceClient.private_connection_path(project, location, private_connection) + assert expected == actual + + +def test_parse_private_connection_path(): + expected = { + "project": "clam", + "location": "whelk", + "private_connection": "octopus", + } + path = DataMigrationServiceClient.private_connection_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_private_connection_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataMigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DataMigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataMigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DataMigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataMigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DataMigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DataMigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DataMigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataMigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DataMigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataMigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far 
as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ call.return_value = operations_pb2.Operation()
+
+ client.get_operation(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+# Async variant of the field-header test above; FakeUnaryUnaryCall makes the
+# awaited stub call resolve immediately with the canned response.
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = operations_pb2.GetOperationRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation()
+ )
+ await client.get_operation(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+# Smoke test: the client accepts a plain dict in place of a request proto.
+def test_get_operation_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation()
+
+ response = client.get_operation(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation()
+ )
+ response = await client.get_operation(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+def test_list_operations(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = operations_pb2.ListOperationsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.ListOperationsResponse()
+ response = client.list_operations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.ListOperationsResponse)
+# NOTE(review): this async test defaults transport to "grpc", while
+# test_get_location_async later in this file uses "grpc_asyncio" — confirm the
+# async client accepts the sync transport label, or align on "grpc_asyncio".
+@pytest.mark.asyncio
+async def test_list_operations_async(transport: str = "grpc"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = operations_pb2.ListOperationsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ response = await client.list_operations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+def test_list_operations_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = operations_pb2.ListOperationsRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ call.return_value = operations_pb2.ListOperationsResponse()
+
+ client.list_operations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_operations_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = operations_pb2.ListOperationsRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ await client.list_operations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_operations_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.ListOperationsResponse()
+
+ response = client.list_operations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ response = await client.list_operations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+def test_list_locations(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = locations_pb2.ListLocationsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.ListLocationsResponse()
+ response = client.list_locations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, locations_pb2.ListLocationsResponse)
+# NOTE(review): transport defaults to "grpc" on this async test, but
+# test_get_location_async below uses "grpc_asyncio" — confirm which label the
+# async client actually accepts and align for consistency.
+@pytest.mark.asyncio
+async def test_list_locations_async(transport: str = "grpc"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = locations_pb2.ListLocationsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.ListLocationsResponse()
+ )
+ response = await client.list_locations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+def test_list_locations_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.ListLocationsRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ call.return_value = locations_pb2.ListLocationsResponse()
+
+ client.list_locations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_list_locations_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.ListLocationsRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.ListLocationsResponse()
+ )
+ await client.list_locations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_list_locations_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.ListLocationsResponse()
+
+ response = client.list_locations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_list_locations_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.ListLocationsResponse()
+ )
+ response = await client.list_locations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+def test_get_location(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = locations_pb2.GetLocationRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location()
+ response = client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, locations_pb2.Location)
+@pytest.mark.asyncio
+async def test_get_location_async(transport: str = "grpc_asyncio"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = locations_pb2.GetLocationRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ response = await client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials())
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = locations_pb2.Location()
+
+ client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ await client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+# Smoke test: get_location accepts a plain dict in place of the request proto.
+def test_get_location_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ # Fix: patch the get_location stub (was list_locations). The patched stub
+ # must match the RPC being invoked below, otherwise assert_called() checks
+ # a mock that is never exercised and the real stub path is hit instead.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location()
+
+ response = client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ # Fix: patch the get_location stub (was list_locations) — see the sync
+ # variant above for the rationale.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ response = await client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+ response = client.set_iam_policy(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ policy_pb2.Policy(version=774, etag=b"etag_blob",)
+ )
+ response = await client.set_iam_policy(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+def test_set_iam_policy_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ call.return_value = policy_pb2.Policy()
+
+ client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+ await client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+def test_set_iam_policy_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy()
+
+ response = client.set_iam_policy(
+ request={
+ "resource": "resource_value",
+ "policy": policy_pb2.Policy(version=774),
+ }
+ )
+ call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ policy_pb2.Policy()
+ )
+
+ response = await client.set_iam_policy(
+ request={
+ "resource": "resource_value",
+ "policy": policy_pb2.Policy(version=774),
+ }
+ )
+ call.assert_called()
+
+def test_get_iam_policy(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+
+ response = client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ policy_pb2.Policy(version=774, etag=b"etag_blob",)
+ )
+
+ response = await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ # NOTE(review): weaker than the "== 1" used in the sync variant above —
+ # confirm the omission of "== 1" is intentional.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+ call.return_value = policy_pb2.Policy()
+
+ client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+ await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ # NOTE(review): weaker than the "== 1" used in the sync variant — confirm.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_get_iam_policy_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy()
+
+ response = client.get_iam_policy(
+ request={
+ "resource": "resource_value",
+ "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
+ }
+ )
+ call.assert_called()
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ policy_pb2.Policy()
+ )
+
+ response = await client.get_iam_policy(
+ request={
+ "resource": "resource_value",
+ "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
+ }
+ )
+ call.assert_called()
+
+def test_test_iam_permissions(transport: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
+ permissions=["permissions_value"],
+ )
+
+ response = client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+
+ assert response.permissions == ["permissions_value"]
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],)
+ )
+
+ response = await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ # NOTE(review): weaker than the "== 1" used in the sync variant — confirm.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+
+ assert response.permissions == ["permissions_value"]
+
+
+def test_test_iam_permissions_field_headers():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+
+ client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_field_headers_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ iam_policy_pb2.TestIamPermissionsResponse()
+ )
+
+ await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ # NOTE(review): weaker than the "== 1" used in the sync variant — confirm.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_test_iam_permissions_from_dict():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+
+ response = client.test_iam_permissions(
+ request={
+ "resource": "resource_value",
+ "permissions": ["permissions_value"],
+ }
+ )
+ call.assert_called()
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_from_dict_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ iam_policy_pb2.TestIamPermissionsResponse()
+ )
+
+ response = await client.test_iam_permissions(
+ request={
+ "resource": "resource_value",
+ "permissions": ["permissions_value"],
+ }
+ )
+ call.assert_called()
+
+# Closing the client must close the underlying transport channel exactly once.
+def test_transport_close():
+ transports = {
+ "grpc": "_grpc_channel",
+ }
+
+ for transport, close_name in transports.items():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport
+ )
+ with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+# Using the client as a context manager must delegate close() to the transport.
+def test_client_ctx():
+ transports = [
+ 'grpc',
+ ]
+ for transport in transports:
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()
+
+# Verifies that client_options.api_key is converted to API-key credentials and
+# forwarded to the transport constructor with the standard argument set.
+@pytest.mark.parametrize("client_class,transport_class", [
+ (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport),
+ (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+ with mock.patch.object(
+ google.auth._default, "get_api_key_credentials", create=True
+ ) as get_api_key_credentials:
+ mock_cred = mock.Mock()
+ get_api_key_credentials.return_value = mock_cred
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=mock_cred,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )

From 1490e271c4e74fe69aa880c10c354fc7fa4fde67 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 18 May 2023 08:49:46 +0000
Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?=
 =?UTF-8?q?st-processor?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---
 google/cloud/clouddms/__init__.py | 116 +
 google/cloud/clouddms_v1/__init__.py | 116 +
 google/cloud/clouddms_v1/gapic_metadata.json | 190 +
 .../data_migration_service/async_client.py | 2783 +++-
 .../services/data_migration_service/client.py | 2887 +++-
 .../services/data_migration_service/pagers.py | 520 +-
 .../data_migration_service/transports/base.py | 382 +-
 .../data_migration_service/transports/grpc.py | 757 +-
.../transports/grpc_asyncio.py | 773 +- google/cloud/clouddms_v1/types/__init__.py | 116 + google/cloud/clouddms_v1/types/clouddms.py | 1038 +- .../clouddms_v1/types/clouddms_resources.py | 801 +- .../types/conversionworkspace_resources.py | 167 +- owl-bot-staging/v1/.coveragerc | 13 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - .../clouddms_v1/data_migration_service.rst | 10 - .../v1/docs/clouddms_v1/services.rst | 6 - owl-bot-staging/v1/docs/clouddms_v1/types.rst | 6 - owl-bot-staging/v1/docs/conf.py | 376 - owl-bot-staging/v1/docs/index.rst | 7 - .../v1/google/cloud/clouddms/__init__.py | 219 - .../v1/google/cloud/clouddms/gapic_version.py | 16 - .../v1/google/cloud/clouddms/py.typed | 2 - .../v1/google/cloud/clouddms_v1/__init__.py | 220 - .../cloud/clouddms_v1/gapic_metadata.json | 383 - .../google/cloud/clouddms_v1/gapic_version.py | 16 - .../v1/google/cloud/clouddms_v1/py.typed | 2 - .../cloud/clouddms_v1/services/__init__.py | 15 - .../data_migration_service/__init__.py | 22 - .../data_migration_service/async_client.py | 4804 ------- .../services/data_migration_service/client.py | 5053 ------- .../services/data_migration_service/pagers.py | 746 -- .../transports/__init__.py | 33 - .../data_migration_service/transports/base.py | 733 -- .../data_migration_service/transports/grpc.py | 1430 -- .../transports/grpc_asyncio.py | 1429 -- .../cloud/clouddms_v1/types/__init__.py | 216 - .../cloud/clouddms_v1/types/clouddms.py | 1718 --- .../clouddms_v1/types/clouddms_resources.py | 2025 --- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...service_create_connection_profile_async.py | 64 - ..._service_create_connection_profile_sync.py | 64 - ...tion_service_create_migration_job_async.py | 65 - ...ation_service_create_migration_job_sync.py | 65 - ...service_delete_connection_profile_async.py | 56 - ..._service_delete_connection_profile_sync.py | 56 - 
...tion_service_delete_migration_job_async.py | 56 - ...ation_service_delete_migration_job_sync.py | 56 - ...ation_service_generate_ssh_script_async.py | 56 - ...ration_service_generate_ssh_script_sync.py | 56 - ...on_service_get_connection_profile_async.py | 52 - ...ion_service_get_connection_profile_sync.py | 52 - ...gration_service_get_migration_job_async.py | 52 - ...igration_service_get_migration_job_sync.py | 52 - ..._service_list_connection_profiles_async.py | 53 - ...n_service_list_connection_profiles_sync.py | 53 - ...ation_service_list_migration_jobs_async.py | 53 - ...ration_service_list_migration_jobs_sync.py | 53 - ...ion_service_promote_migration_job_async.py | 55 - ...tion_service_promote_migration_job_sync.py | 55 - ...ion_service_restart_migration_job_async.py | 55 - ...tion_service_restart_migration_job_sync.py | 55 - ...tion_service_resume_migration_job_async.py | 55 - ...ation_service_resume_migration_job_sync.py | 55 - ...ation_service_start_migration_job_async.py | 55 - ...ration_service_start_migration_job_sync.py | 55 - ...ration_service_stop_migration_job_async.py | 55 - ...gration_service_stop_migration_job_sync.py | 55 - ...service_update_connection_profile_async.py | 62 - ..._service_update_connection_profile_sync.py | 62 - ...tion_service_update_migration_job_async.py | 63 - ...ation_service_update_migration_job_sync.py | 63 - ...tion_service_verify_migration_job_async.py | 55 - ...ation_service_verify_migration_job_sync.py | 55 - ...pet_metadata_google.cloud.clouddms.v1.json | 5771 -------- .../v1/scripts/fixup_clouddms_v1_keywords.py | 211 - owl-bot-staging/v1/setup.py | 91 - .../v1/testing/constraints-3.10.txt | 7 - .../v1/testing/constraints-3.11.txt | 7 - .../v1/testing/constraints-3.12.txt | 7 - .../v1/testing/constraints-3.7.txt | 10 - .../v1/testing/constraints-3.8.txt | 7 - .../v1/testing/constraints-3.9.txt | 7 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - 
.../v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/clouddms_v1/__init__.py | 16 - .../test_data_migration_service.py | 10874 ---------------- ...ervice_apply_conversion_workspace_async.py | 0 ...service_apply_conversion_workspace_sync.py | 0 ...rvice_commit_conversion_workspace_async.py | 0 ...ervice_commit_conversion_workspace_sync.py | 0 ...vice_convert_conversion_workspace_async.py | 0 ...rvice_convert_conversion_workspace_sync.py | 0 ...rvice_create_conversion_workspace_async.py | 0 ...ervice_create_conversion_workspace_sync.py | 0 ...service_create_private_connection_async.py | 0 ..._service_create_private_connection_sync.py | 0 ...rvice_delete_conversion_workspace_async.py | 0 ...ervice_delete_conversion_workspace_sync.py | 0 ...service_delete_private_connection_async.py | 0 ..._service_delete_private_connection_sync.py | 0 ...be_conversion_workspace_revisions_async.py | 0 ...ibe_conversion_workspace_revisions_sync.py | 0 ...ervice_describe_database_entities_async.py | 0 ...service_describe_database_entities_sync.py | 0 ...igration_service_fetch_static_ips_async.py | 0 ...migration_service_fetch_static_ips_sync.py | 0 ..._service_get_conversion_workspace_async.py | 0 ...n_service_get_conversion_workspace_sync.py | 0 ...on_service_get_private_connection_async.py | 0 ...ion_service_get_private_connection_sync.py | 0 ...tion_service_import_mapping_rules_async.py | 0 ...ation_service_import_mapping_rules_sync.py | 0 ...ervice_list_conversion_workspaces_async.py | 0 ...service_list_conversion_workspaces_sync.py | 0 ..._service_list_private_connections_async.py | 0 ...n_service_list_private_connections_sync.py | 0 ...ice_rollback_conversion_workspace_async.py | 0 ...vice_rollback_conversion_workspace_sync.py | 0 ...on_service_search_background_jobs_async.py | 0 ...ion_service_search_background_jobs_sync.py | 0 ...service_seed_conversion_workspace_async.py | 0 ..._service_seed_conversion_workspace_sync.py | 0 
...rvice_update_conversion_workspace_async.py | 0 ...ervice_update_conversion_workspace_sync.py | 0 ...pet_metadata_google.cloud.clouddms.v1.json | 3965 +++++- scripts/fixup_clouddms_v1_keywords.py | 23 +- setup.py | 1 + testing/constraints-3.10.txt | 1 + testing/constraints-3.11.txt | 1 + testing/constraints-3.12.txt | 1 + testing/constraints-3.7.txt | 1 + testing/constraints-3.8.txt | 1 + testing/constraints-3.9.txt | 1 + .../test_data_migration_service.py | 7386 ++++++++++- 139 files changed, 20786 insertions(+), 39992 deletions(-) rename {owl-bot-staging/v1/google => google}/cloud/clouddms_v1/types/conversionworkspace_resources.py (91%) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst delete mode 100644 owl-bot-staging/v1/docs/clouddms_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/clouddms_v1/types.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py delete 
mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py delete mode 100644 owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py 
delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json delete mode 100644 owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => 
samples}/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py (100%) rename {owl-bot-staging/v1/samples => 
samples}/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py (100%) rename {owl-bot-staging/v1/samples => 
samples}/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py (100%) diff --git a/google/cloud/clouddms/__init__.py b/google/cloud/clouddms/__init__.py index 2cda060..7d90354 100644 --- a/google/cloud/clouddms/__init__.py +++ b/google/cloud/clouddms/__init__.py @@ -25,89 +25,205 @@ DataMigrationServiceClient, ) from google.cloud.clouddms_v1.types.clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + 
DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from google.cloud.clouddms_v1.types.clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + 
ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( "DataMigrationServiceClient", "DataMigrationServiceAsyncClient", + "ApplyConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", "ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "OperationMetadata", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", + "RollbackConversionWorkspaceRequest", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", "SshScript", "StartMigrationJobRequest", "StopMigrationJobRequest", "UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", "VmCreationConfig", "VmSelectionConfig", + "AlloyDbConnectionProfile", + "AlloyDbSettings", "CloudSqlConnectionProfile", "CloudSqlSettings", "ConnectionProfile", + "ConversionWorkspaceInfo", "DatabaseType", + 
"ForwardSshTunnelConnectivity", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "OracleConnectionProfile", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "SqlAclEntry", "SqlIpConfig", "SslConfig", "StaticIpConnectivity", + "StaticServiceIpConnectivity", + "VpcPeeringConfig", "VpcPeeringConnectivity", "DatabaseEngine", "DatabaseProvider", + "NetworkArchitecture", + "BackgroundJobLogEntry", + "ColumnEntity", + "ConstraintEntity", + "ConversionWorkspace", + "DatabaseEngineInfo", + "DatabaseEntity", + "EntityMapping", + "EntityMappingLogEntry", + "FunctionEntity", + "IndexEntity", + "PackageEntity", + "SchemaEntity", + "SequenceEntity", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", + "ViewEntity", + "BackgroundJobType", + "DatabaseEntityType", + "ImportRulesFileFormat", ) diff --git a/google/cloud/clouddms_v1/__init__.py b/google/cloud/clouddms_v1/__init__.py index cc0f3b5..0412200 100644 --- a/google/cloud/clouddms_v1/__init__.py +++ b/google/cloud/clouddms_v1/__init__.py @@ -23,89 +23,205 @@ DataMigrationServiceClient, ) from .types.clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, 
ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from .types.clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from .types.conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( "DataMigrationServiceAsyncClient", + "AlloyDbConnectionProfile", + "AlloyDbSettings", + "ApplyConversionWorkspaceRequest", + "BackgroundJobLogEntry", + 
"BackgroundJobType", "CloudSqlConnectionProfile", "CloudSqlSettings", + "ColumnEntity", + "CommitConversionWorkspaceRequest", "ConnectionProfile", + "ConstraintEntity", + "ConversionWorkspace", + "ConversionWorkspaceInfo", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DataMigrationServiceClient", "DatabaseEngine", + "DatabaseEngineInfo", + "DatabaseEntity", + "DatabaseEntityType", "DatabaseProvider", "DatabaseType", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "EntityMapping", + "EntityMappingLogEntry", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", + "ForwardSshTunnelConnectivity", + "FunctionEntity", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", + "ImportRulesFileFormat", + "IndexEntity", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", "ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "NetworkArchitecture", "OperationMetadata", + "OracleConnectionProfile", + "PackageEntity", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", "ReverseSshConnectivity", + "RollbackConversionWorkspaceRequest", + "SchemaEntity", + 
"SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", + "SequenceEntity", "SqlAclEntry", "SqlIpConfig", "SshScript", "SslConfig", "StartMigrationJobRequest", "StaticIpConnectivity", + "StaticServiceIpConnectivity", "StopMigrationJobRequest", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", "UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", + "ViewEntity", "VmCreationConfig", "VmSelectionConfig", + "VpcPeeringConfig", "VpcPeeringConnectivity", ) diff --git a/google/cloud/clouddms_v1/gapic_metadata.json b/google/cloud/clouddms_v1/gapic_metadata.json index d505c03..a1d6f43 100644 --- a/google/cloud/clouddms_v1/gapic_metadata.json +++ b/google/cloud/clouddms_v1/gapic_metadata.json @@ -10,26 +10,76 @@ "grpc": { "libraryClient": "DataMigrationServiceClient", "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, "CreateConnectionProfile": { "methods": [ "create_connection_profile" ] }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, "CreateMigrationJob": { "methods": [ "create_migration_job" ] }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, "DeleteConnectionProfile": { "methods": [ "delete_connection_profile" ] }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, "DeleteMigrationJob": { "methods": [ "delete_migration_job" ] }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + 
"describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, "GenerateSshScript": { "methods": [ "generate_ssh_script" @@ -40,21 +90,46 @@ "get_connection_profile" ] }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, "GetMigrationJob": { "methods": [ "get_migration_job" ] }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, "ListConnectionProfiles": { "methods": [ "list_connection_profiles" ] }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, "ListMigrationJobs": { "methods": [ "list_migration_jobs" ] }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, "PromoteMigrationJob": { "methods": [ "promote_migration_job" @@ -70,6 +145,21 @@ "resume_migration_job" ] }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, "StartMigrationJob": { "methods": [ "start_migration_job" @@ -85,6 +175,11 @@ "update_connection_profile" ] }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, "UpdateMigrationJob": { "methods": [ "update_migration_job" @@ -100,26 +195,76 @@ "grpc-async": { "libraryClient": "DataMigrationServiceAsyncClient", "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, "CreateConnectionProfile": { "methods": [ "create_connection_profile" ] }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, "CreateMigrationJob": { 
"methods": [ "create_migration_job" ] }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, "DeleteConnectionProfile": { "methods": [ "delete_connection_profile" ] }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, "DeleteMigrationJob": { "methods": [ "delete_migration_job" ] }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, "GenerateSshScript": { "methods": [ "generate_ssh_script" @@ -130,21 +275,46 @@ "get_connection_profile" ] }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, "GetMigrationJob": { "methods": [ "get_migration_job" ] }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, "ListConnectionProfiles": { "methods": [ "list_connection_profiles" ] }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, "ListMigrationJobs": { "methods": [ "list_migration_jobs" ] }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, "PromoteMigrationJob": { "methods": [ "promote_migration_job" @@ -160,6 +330,21 @@ "resume_migration_job" ] }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, "StartMigrationJob": { "methods": [ "start_migration_job" @@ -175,6 +360,11 @@ "update_connection_profile" ] }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, 
"UpdateMigrationJob": { "methods": [ "update_migration_job" diff --git a/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/google/cloud/clouddms_v1/services/data_migration_service/async_client.py index 0a271e1..110fb2a 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/async_client.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/async_client.py @@ -44,6 +44,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -51,7 +55,11 @@ from google.rpc import status_pb2 # type: ignore from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .client import DataMigrationServiceClient from .transports.base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport @@ -72,10 +80,24 @@ class DataMigrationServiceAsyncClient: parse_connection_profile_path = staticmethod( DataMigrationServiceClient.parse_connection_profile_path ) + conversion_workspace_path = staticmethod( + DataMigrationServiceClient.conversion_workspace_path + ) + parse_conversion_workspace_path = staticmethod( + DataMigrationServiceClient.parse_conversion_workspace_path + ) migration_job_path = staticmethod(DataMigrationServiceClient.migration_job_path) parse_migration_job_path = staticmethod( DataMigrationServiceClient.parse_migration_job_path ) + networks_path = 
staticmethod(DataMigrationServiceClient.networks_path) + parse_networks_path = staticmethod(DataMigrationServiceClient.parse_networks_path) + private_connection_path = staticmethod( + DataMigrationServiceClient.private_connection_path + ) + parse_private_connection_path = staticmethod( + DataMigrationServiceClient.parse_private_connection_path + ) common_billing_account_path = staticmethod( DataMigrationServiceClient.common_billing_account_path ) @@ -271,10 +293,10 @@ async def sample_list_migration_jobs(): Args: request (Optional[Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]]): - The request object. Retrieve a list of all migration jobs - in a given project and location. + The request object. Retrieves a list of all migration + jobs in a given project and location. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of migrationJobs. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -507,7 +529,7 @@ async def sample_create_migration_job(): Database Migration Service migration job in the specified project and region. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of migration jobs. This corresponds to the ``parent`` field @@ -660,9 +682,9 @@ async def sample_update_migration_job(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to - specify the fields to be overwritten in - the migration job resource by the - update. + specify the fields to be overwritten by + the update in the migration job + resource.
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1534,8 +1556,8 @@ async def list_connection_profiles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConnectionProfilesAsyncPager: - r"""Retrieve a list of all connection profiles in a given - project and location. + r"""Retrieves a list of all connection profiles in a + given project and location. .. code-block:: python @@ -1569,7 +1591,7 @@ async def sample_list_connection_profiles(): The request object. Request message for 'ListConnectionProfiles' request. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -1798,7 +1820,7 @@ async def sample_create_connection_profile(): The request object. Request message for 'CreateConnectionProfile' request. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -1949,9 +1971,9 @@ async def sample_update_connection_profile(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to - specify the fields to be overwritten in - the connection profile resource by the - update. + specify the fields to be overwritten by + the update in the connection profile + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2152,6 +2174,2737 @@ async def sample_delete_connection_profile(): # Done; return the response.
return response + async def create_private_connection( + self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]]): + The request object. 
Request message to create a new + private connection in the specified + project and region. + parent (:class:`str`): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (:class:`google.cloud.clouddms_v1.types.PrivateConnection`): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (:class:`str`): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = clouddms.CreatePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_private_connection( + self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]]): + The request object. Request message to get a private + connection resource. + name (:class:`str`): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.GetPrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_private_connections( + self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsAsyncPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (:class:`str`): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = clouddms.ListPrivateConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_private_connections, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPrivateConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_private_connection( + self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]]): + The request object. Request message to delete a private + connection. + name (:class:`str`): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.DeletePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_conversion_workspace( + self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = clouddms.GetConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_conversion_workspaces( + self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesAsyncPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.ListConversionWorkspacesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_conversion_workspaces, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConversionWorkspacesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CreateConversionWorkspaceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new conversion workspace in a given project + and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. Represents a conversion + workspace object. 
+ + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (:class:`str`): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, conversion_workspace, conversion_workspace_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.CreateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_conversion_workspace( + self, + request: Optional[ + Union[clouddms.UpdateConversionWorkspaceRequest, dict] + ] = None, + *, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.UpdateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace.name", request.conversion_workspace.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_conversion_workspace( + self, + request: Optional[ + Union[clouddms.DeleteConversionWorkspaceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.DeleteConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def seed_conversion_workspace( + self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'SeedConversionWorkspace' request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seed_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def import_mapping_rules( + self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_mapping_rules, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def convert_conversion_workspace( + self, + request: Optional[ + Union[clouddms.ConvertConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.convert_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def commit_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CommitConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'CommitConversionWorkspace' request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rollback_conversion_workspace( + self, + request: Optional[ + Union[clouddms.RollbackConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def apply_conversion_workspace( + self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def describe_database_entities( + self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesAsyncPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_database_entities, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.DescribeDatabaseEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_background_jobs( + self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_background_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def describe_conversion_workspace_revisions( + self, + request: Optional[ + Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_static_ips( + self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsAsyncPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]]): + The request object. Request message for 'FetchStaticIps' + request. + name (:class:`str`): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = clouddms.FetchStaticIpsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_static_ips, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchStaticIpsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/clouddms_v1/services/data_migration_service/client.py b/google/cloud/clouddms_v1/services/data_migration_service/client.py index e5188db..2ba298b 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/client.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/client.py @@ -48,6 +48,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -55,7 +59,11 @@ from google.rpc import status_pb2 # type: ignore from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .transports.base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport from .transports.grpc import DataMigrationServiceGrpcTransport @@ -205,6 +213,28 @@ def parse_connection_profile_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def conversion_workspace_path( + project: str, + location: str, + conversion_workspace: str, + ) -> str: + """Returns a fully-qualified conversion_workspace string.""" + return "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format( + project=project, + location=location, + conversion_workspace=conversion_workspace, + ) + + @staticmethod + def parse_conversion_workspace_path(path: str) -> Dict[str, str]: + """Parses a conversion_workspace 
path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/conversionWorkspaces/(?P<conversion_workspace>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def migration_job_path( project: str, @@ -227,6 +257,47 @@ def parse_migration_job_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def networks_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified networks string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_networks_path(path: str) -> Dict[str, str]: + """Parses a networks path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def private_connection_path( + project: str, + location: str, + private_connection: str, + ) -> str: + """Returns a fully-qualified private_connection string.""" + return "projects/{project}/locations/{location}/privateConnections/{private_connection}".format( + project=project, + location=location, + private_connection=private_connection, + ) + + @staticmethod + def parse_private_connection_path(path: str) -> Dict[str, str]: + """Parses a private_connection path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/privateConnections/(?P<private_connection>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -511,10 +582,10 @@ def sample_list_migration_jobs(): Args: request (Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]): - The request object. Retrieve a list of all migration jobs - in a given project and location. + The request object. Retrieves a list of all migration + jobs in a given project and location. parent (str): - Required. The parent, which owns this + Required.
The parent which owns this collection of migrationJobs. This corresponds to the ``parent`` field @@ -747,7 +818,7 @@ def sample_create_migration_job(): Database Migration Service migration job in the specified project and region. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of migration jobs. This corresponds to the ``parent`` field @@ -900,9 +971,9 @@ def sample_update_migration_job(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to - specify the fields to be overwritten in - the migration job resource by the - update. + specify the fields to be overwritten by + the update in the conversion workspace + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1781,8 +1852,8 @@ def list_connection_profiles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConnectionProfilesPager: - r"""Retrieve a list of all connection profiles in a given - project and location. + r"""Retrieves a list of all connection profiles in a + given project and location. .. code-block:: python @@ -1816,7 +1887,7 @@ def sample_list_connection_profiles(): The request object. Request message for 'ListConnectionProfiles' request. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -2045,7 +2116,7 @@ def sample_create_connection_profile(): The request object. Request message for 'CreateConnectionProfile' request. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -2198,9 +2269,9 @@ def sample_update_connection_profile(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. 
Field mask is used to - specify the fields to be overwritten in - the connection profile resource by the - update. + specify the fields to be overwritten by + the update in the conversion workspace + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2405,18 +2476,2786 @@ def sample_delete_connection_profile(): # Done; return the response. return response - def __enter__(self) -> "DataMigrationServiceClient": - return self + def create_private_connection( + self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new private connection in a given project + and location. - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (str): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (str): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! """ - self.transport.close() + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreatePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreatePrivateConnectionRequest): + request = clouddms.CreatePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_private_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_private_connection( + self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]): + The request object. Request message to get a private + connection resource. + name (str): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetPrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, clouddms.GetPrivateConnectionRequest): + request = clouddms.GetPrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_private_connections( + self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (str): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListPrivateConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListPrivateConnectionsRequest): + request = clouddms.ListPrivateConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_private_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPrivateConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_private_connection( + self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]): + The request object. Request message to delete a private + connection. + name (str): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeletePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeletePrivateConnectionRequest): + request = clouddms.DeletePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_private_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_conversion_workspace( + self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetConversionWorkspaceRequest): + request = clouddms.GetConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_conversion_workspaces( + self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListConversionWorkspacesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListConversionWorkspacesRequest): + request = clouddms.ListConversionWorkspacesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_conversion_workspaces + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConversionWorkspacesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CreateConversionWorkspaceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new conversion workspace in a given project + and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion + workspace object. 
+ + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (str): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, conversion_workspace, conversion_workspace_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreateConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): + request = clouddms.CreateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_conversion_workspace( + self, + request: Optional[ + Union[clouddms.UpdateConversionWorkspaceRequest, dict] + ] = None, + *, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.UpdateConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): + request = clouddms.UpdateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace.name", request.conversion_workspace.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_conversion_workspace( + self, + request: Optional[ + Union[clouddms.DeleteConversionWorkspaceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeleteConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): + request = clouddms.DeleteConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def seed_conversion_workspace( + self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]): + The request object. Request message for + 'SeedConversionWorkspace' request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SeedConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.seed_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def import_mapping_rules( + self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ImportMappingRulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ImportMappingRulesRequest): + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def convert_conversion_workspace( + self, + request: Optional[ + Union[clouddms.ConvertConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ConvertConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.convert_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def commit_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CommitConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CommitConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.commit_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def rollback_conversion_workspace( + self, + request: Optional[ + Union[clouddms.RollbackConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.RollbackConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.rollback_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def apply_conversion_workspace( + self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ApplyConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.apply_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def describe_database_entities( + self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeDatabaseEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.describe_database_entities + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.DescribeDatabaseEntitiesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_background_jobs( + self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SearchBackgroundJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SearchBackgroundJobsRequest): + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_background_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def describe_conversion_workspace_revisions( + self, + request: Optional[ + Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeConversionWorkspaceRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, clouddms.DescribeConversionWorkspaceRevisionsRequest + ): + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.describe_conversion_workspace_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_static_ips( + self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]): + The request object. Request message for 'FetchStaticIps' + request. + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.FetchStaticIpsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.FetchStaticIpsRequest): + request = clouddms.FetchStaticIpsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_static_ips] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchStaticIpsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataMigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. 
+ + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/clouddms_v1/services/data_migration_service/pagers.py b/google/cloud/clouddms_v1/services/data_migration_service/pagers.py index ff6a717..63b5593 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/pagers.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/pagers.py @@ -24,7 +24,11 @@ Tuple, ) -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) class ListMigrationJobsPager: @@ -281,3 +285,517 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.ListPrivateConnectionsResponse], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.PrivateConnection]: + for page in self.pages: + yield from page.private_connections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsAsyncPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.ListPrivateConnectionsResponse]], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[clouddms_resources.PrivateConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.private_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``conversion_workspaces`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.ListConversionWorkspacesResponse], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.ConversionWorkspace]: + for page in self.pages: + yield from page.conversion_workspaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesAsyncPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``conversion_workspaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.ListConversionWorkspacesResponse]], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[conversionworkspace_resources.ConversionWorkspace]: + async def async_generator(): + async for page in self.pages: + for response in page.conversion_workspaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.DescribeDatabaseEntitiesResponse], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.DatabaseEntity]: + for page in self.pages: + yield from page.database_entities + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesAsyncPager: + """A pager for iterating through ``describe_database_entities`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.DescribeDatabaseEntitiesResponse]], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.DatabaseEntity]: + async def async_generator(): + async for page in self.pages: + for response in page.database_entities: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchStaticIpsPager: + """A pager for iterating through ``fetch_static_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.FetchStaticIpsResponse], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.static_ips + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchStaticIpsAsyncPager: + """A pager for iterating through ``fetch_static_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.FetchStaticIpsResponse]], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.static_ips: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py index 8eb6c8c..f3bf91e 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py @@ -22,11 +22,18 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import 
credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.clouddms_v1 import gapic_version as package_version -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -208,6 +215,101 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_private_connection: gapic_v1.method.wrap_method( + self.create_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_private_connection: gapic_v1.method.wrap_method( + self.get_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.list_private_connections: gapic_v1.method.wrap_method( + self.list_private_connections, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_private_connection: gapic_v1.method.wrap_method( + self.delete_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_conversion_workspace: gapic_v1.method.wrap_method( + self.get_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_conversion_workspaces: gapic_v1.method.wrap_method( + self.list_conversion_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.create_conversion_workspace: gapic_v1.method.wrap_method( + self.create_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.update_conversion_workspace: gapic_v1.method.wrap_method( + self.update_conversion_workspace, + 
default_timeout=60.0, + client_info=client_info, + ), + self.delete_conversion_workspace: gapic_v1.method.wrap_method( + self.delete_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.seed_conversion_workspace: gapic_v1.method.wrap_method( + self.seed_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.import_mapping_rules: gapic_v1.method.wrap_method( + self.import_mapping_rules, + default_timeout=60.0, + client_info=client_info, + ), + self.convert_conversion_workspace: gapic_v1.method.wrap_method( + self.convert_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.commit_conversion_workspace: gapic_v1.method.wrap_method( + self.commit_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_conversion_workspace: gapic_v1.method.wrap_method( + self.rollback_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.apply_conversion_workspace: gapic_v1.method.wrap_method( + self.apply_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_database_entities: gapic_v1.method.wrap_method( + self.describe_database_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.search_background_jobs: gapic_v1.method.wrap_method( + self.search_background_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_conversion_workspace_revisions: gapic_v1.method.wrap_method( + self.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_static_ips: gapic_v1.method.wrap_method( + self.fetch_static_ips, + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -388,6 +490,284 @@ def delete_connection_profile( ]: raise NotImplementedError() + @property + def create_private_connection( + self, + ) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Union[ + clouddms_resources.PrivateConnection, + Awaitable[clouddms_resources.PrivateConnection], + ], + ]: + raise NotImplementedError() + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Union[ + clouddms.ListPrivateConnectionsResponse, + Awaitable[clouddms.ListPrivateConnectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_private_connection( + self, + ) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Union[ + conversionworkspace_resources.ConversionWorkspace, + Awaitable[conversionworkspace_resources.ConversionWorkspace], + ], + ]: + raise NotImplementedError() + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Union[ + clouddms.ListConversionWorkspacesResponse, + Awaitable[clouddms.ListConversionWorkspacesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def seed_conversion_workspace( + self, + ) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_mapping_rules( + self, + ) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def apply_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Union[ + clouddms.DescribeDatabaseEntitiesResponse, + Awaitable[clouddms.DescribeDatabaseEntitiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Union[ + clouddms.SearchBackgroundJobsResponse, + Awaitable[clouddms.SearchBackgroundJobsResponse], + ], + ]: + raise NotImplementedError() + + @property + 
def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Union[ + clouddms.DescribeConversionWorkspaceRevisionsResponse, + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Union[ + clouddms.FetchStaticIpsResponse, Awaitable[clouddms.FetchStaticIpsResponse] + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + 
[locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py index 901808f..043ebc7 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py @@ -20,10 +20,17 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport @@ -576,8 +583,8 @@ def list_connection_profiles( ]: r"""Return a callable for the list connection profiles method over gRPC. - Retrieve a list of all connection profiles in a given - project and location. + Retrieves a list of all connection profiles in a + given project and location. 
Returns: Callable[[~.ListConnectionProfilesRequest], @@ -708,9 +715,753 @@ def delete_connection_profile( ) return self._stubs["delete_connection_profile"] + @property + def create_private_connection( + self, + ) -> Callable[[clouddms.CreatePrivateConnectionRequest], operations_pb2.Operation]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_private_connection" not in self._stubs: + self._stubs["create_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection", + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_private_connection"] + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], clouddms_resources.PrivateConnection + ]: + r"""Return a callable for the get private connection method over gRPC. + + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + ~.PrivateConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_private_connection" not in self._stubs: + self._stubs["get_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection", + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs["get_private_connection"] + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + clouddms.ListPrivateConnectionsResponse, + ]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + ~.ListPrivateConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_private_connections" not in self._stubs: + self._stubs["list_private_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections", + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs["list_private_connections"] + + @property + def delete_private_connection( + self, + ) -> Callable[[clouddms.DeletePrivateConnectionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_private_connection" not in self._stubs: + self._stubs["delete_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection", + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_private_connection"] + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + conversionworkspace_resources.ConversionWorkspace, + ]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + ~.ConversionWorkspace]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_conversion_workspace" not in self._stubs: + self._stubs["get_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace", + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs["get_conversion_workspace"] + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + clouddms.ListConversionWorkspacesResponse, + ]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. 
+ + Returns: + Callable[[~.ListConversionWorkspacesRequest], + ~.ListConversionWorkspacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_conversion_workspaces" not in self._stubs: + self._stubs["list_conversion_workspaces"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces", + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs["list_conversion_workspaces"] + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_conversion_workspace" not in self._stubs: + self._stubs["create_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace", + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_conversion_workspace"] + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_conversion_workspace" not in self._stubs: + self._stubs["update_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace", + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_conversion_workspace"] + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_conversion_workspace" not in self._stubs: + self._stubs["delete_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace", + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_conversion_workspace"] + + @property + def seed_conversion_workspace( + self, + ) -> Callable[[clouddms.SeedConversionWorkspaceRequest], operations_pb2.Operation]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seed_conversion_workspace" not in self._stubs: + self._stubs["seed_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace", + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["seed_conversion_workspace"] + + @property + def import_mapping_rules( + self, + ) -> Callable[[clouddms.ImportMappingRulesRequest], operations_pb2.Operation]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. 
+ + Returns: + Callable[[~.ImportMappingRulesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_mapping_rules" not in self._stubs: + self._stubs["import_mapping_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules", + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_mapping_rules"] + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "convert_conversion_workspace" not in self._stubs: + self._stubs["convert_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace", + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["convert_conversion_workspace"] + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the commit conversion workspace method over gRPC. 
+ + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit_conversion_workspace" not in self._stubs: + self._stubs["commit_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace", + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["commit_conversion_workspace"] + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rollback_conversion_workspace" not in self._stubs: + self._stubs[ + "rollback_conversion_workspace" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace", + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_conversion_workspace"] + + @property + def apply_conversion_workspace( + self, + ) -> Callable[[clouddms.ApplyConversionWorkspaceRequest], operations_pb2.Operation]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_conversion_workspace" not in self._stubs: + self._stubs["apply_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace", + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["apply_conversion_workspace"] + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + clouddms.DescribeDatabaseEntitiesResponse, + ]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. 
Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + ~.DescribeDatabaseEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "describe_database_entities" not in self._stubs: + self._stubs["describe_database_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities", + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs["describe_database_entities"] + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], clouddms.SearchBackgroundJobsResponse + ]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + ~.SearchBackgroundJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_background_jobs" not in self._stubs: + self._stubs["search_background_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs", + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs["search_background_jobs"] + + @property + def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + clouddms.DescribeConversionWorkspaceRevisionsResponse, + ]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + ~.DescribeConversionWorkspaceRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "describe_conversion_workspace_revisions" not in self._stubs: + self._stubs[ + "describe_conversion_workspace_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions", + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs["describe_conversion_workspace_revisions"] + + @property + def fetch_static_ips( + self, + ) -> Callable[[clouddms.FetchStaticIpsRequest], clouddms.FetchStaticIpsResponse]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. 
+ + Returns: + Callable[[~.FetchStaticIpsRequest], + ~.FetchStaticIpsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_static_ips" not in self._stubs: + self._stubs["fetch_static_ips"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps", + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs["fetch_static_ips"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+ Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. 
+ Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py index 1942a50..376c302 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py @@ -19,11 +19,18 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from 
google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport from .grpc import DataMigrationServiceGrpcTransport @@ -602,8 +609,8 @@ def list_connection_profiles( ]: r"""Return a callable for the list connection profiles method over gRPC. - Retrieve a list of all connection profiles in a given - project and location. + Retrieves a list of all connection profiles in a + given project and location. Returns: Callable[[~.ListConnectionProfilesRequest], @@ -741,8 +748,768 @@ def delete_connection_profile( ) return self._stubs["delete_connection_profile"] + @property + def create_private_connection( + self, + ) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_private_connection" not in self._stubs: + self._stubs["create_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection", + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_private_connection"] + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Awaitable[clouddms_resources.PrivateConnection], + ]: + r"""Return a callable for the get private connection method over gRPC. 
+ + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + Awaitable[~.PrivateConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_private_connection" not in self._stubs: + self._stubs["get_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection", + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs["get_private_connection"] + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Awaitable[clouddms.ListPrivateConnectionsResponse], + ]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + Awaitable[~.ListPrivateConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_private_connections" not in self._stubs: + self._stubs["list_private_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections", + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs["list_private_connections"] + + @property + def delete_private_connection( + self, + ) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_private_connection" not in self._stubs: + self._stubs["delete_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection", + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_private_connection"] + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Awaitable[conversionworkspace_resources.ConversionWorkspace], + ]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + Awaitable[~.ConversionWorkspace]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_conversion_workspace" not in self._stubs: + self._stubs["get_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace", + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs["get_conversion_workspace"] + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Awaitable[clouddms.ListConversionWorkspacesResponse], + ]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + Awaitable[~.ListConversionWorkspacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_conversion_workspaces" not in self._stubs: + self._stubs["list_conversion_workspaces"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces", + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs["list_conversion_workspaces"] + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create conversion workspace method over gRPC. 
+ + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_conversion_workspace" not in self._stubs: + self._stubs["create_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace", + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_conversion_workspace"] + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_conversion_workspace" not in self._stubs: + self._stubs["update_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace", + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_conversion_workspace"] + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_conversion_workspace" not in self._stubs: + self._stubs["delete_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace", + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_conversion_workspace"] + + @property + def seed_conversion_workspace( + self, + ) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seed_conversion_workspace" not in self._stubs: + self._stubs["seed_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace", + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["seed_conversion_workspace"] + + @property + def import_mapping_rules( + self, + ) -> Callable[ + [clouddms.ImportMappingRulesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_mapping_rules" not in self._stubs: + self._stubs["import_mapping_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules", + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_mapping_rules"] + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. 
+ + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "convert_conversion_workspace" not in self._stubs: + self._stubs["convert_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace", + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["convert_conversion_workspace"] + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "commit_conversion_workspace" not in self._stubs: + self._stubs["commit_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace", + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["commit_conversion_workspace"] + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_conversion_workspace" not in self._stubs: + self._stubs[ + "rollback_conversion_workspace" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace", + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_conversion_workspace"] + + @property + def apply_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_conversion_workspace" not in self._stubs: + self._stubs["apply_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace", + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["apply_conversion_workspace"] + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Awaitable[clouddms.DescribeDatabaseEntitiesResponse], + ]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + Awaitable[~.DescribeDatabaseEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "describe_database_entities" not in self._stubs: + self._stubs["describe_database_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities", + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs["describe_database_entities"] + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Awaitable[clouddms.SearchBackgroundJobsResponse], + ]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + Awaitable[~.SearchBackgroundJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_background_jobs" not in self._stubs: + self._stubs["search_background_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs", + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs["search_background_jobs"] + + @property + def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse], + ]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[~.DescribeConversionWorkspaceRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "describe_conversion_workspace_revisions" not in self._stubs: + self._stubs[ + "describe_conversion_workspace_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions", + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs["describe_conversion_workspace_revisions"] + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [clouddms.FetchStaticIpsRequest], Awaitable[clouddms.FetchStaticIpsResponse] + ]: + r"""Return a callable for the fetch static ips method over gRPC. 
+ + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + Awaitable[~.FetchStaticIpsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_static_ips" not in self._stubs: + self._stubs["fetch_static_ips"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps", + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs["fetch_static_ips"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs:
+ self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ "/google.longrunning.Operations/ListOperations",
+ request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+ response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+ )
+ return self._stubs["list_operations"]
+
+ @property
+ def list_locations(
+ self,
+ ) -> Callable[
+ [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+ ]:
+ r"""Return a callable for the list locations method over gRPC."""
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_locations" not in self._stubs:
+ self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+ "/google.cloud.location.Locations/ListLocations",
+ request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+ response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+ )
+ return self._stubs["list_locations"]
+
+ @property
+ def get_location(
+ self,
+ ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+ r"""Return a callable for the get location method over gRPC."""
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_location" not in self._stubs:
+ self._stubs["get_location"] = self.grpc_channel.unary_unary(
+ "/google.cloud.location.Locations/GetLocation",
+ request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+ response_deserializer=locations_pb2.Location.FromString,
+ )
+ return self._stubs["get_location"]
+
+ @property
+ def set_iam_policy(
+ self,
+ ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+ r"""Return a callable for the set iam policy method over gRPC. 
+ Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. 
+ Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ("DataMigrationServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/clouddms_v1/types/__init__.py b/google/cloud/clouddms_v1/types/__init__.py index cb3b44b..284b203 100644 --- a/google/cloud/clouddms_v1/types/__init__.py +++ b/google/cloud/clouddms_v1/types/__init__.py @@ -14,87 +14,203 @@ # limitations under the License. 
# from .clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from .clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, 
SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from .conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( + "ApplyConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", "ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "OperationMetadata", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", + "RollbackConversionWorkspaceRequest", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", "SshScript", "StartMigrationJobRequest", "StopMigrationJobRequest", 
"UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", "VmCreationConfig", "VmSelectionConfig", + "AlloyDbConnectionProfile", + "AlloyDbSettings", "CloudSqlConnectionProfile", "CloudSqlSettings", "ConnectionProfile", + "ConversionWorkspaceInfo", "DatabaseType", + "ForwardSshTunnelConnectivity", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "OracleConnectionProfile", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "SqlAclEntry", "SqlIpConfig", "SslConfig", "StaticIpConnectivity", + "StaticServiceIpConnectivity", + "VpcPeeringConfig", "VpcPeeringConnectivity", "DatabaseEngine", "DatabaseProvider", + "NetworkArchitecture", + "BackgroundJobLogEntry", + "ColumnEntity", + "ConstraintEntity", + "ConversionWorkspace", + "DatabaseEngineInfo", + "DatabaseEntity", + "EntityMapping", + "EntityMappingLogEntry", + "FunctionEntity", + "IndexEntity", + "PackageEntity", + "SchemaEntity", + "SequenceEntity", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", + "ViewEntity", + "BackgroundJobType", + "DatabaseEntityType", + "ImportRulesFileFormat", ) diff --git a/google/cloud/clouddms_v1/types/clouddms.py b/google/cloud/clouddms_v1/types/clouddms.py index 089aa49..9cf97e1 100644 --- a/google/cloud/clouddms_v1/types/clouddms.py +++ b/google/cloud/clouddms_v1/types/clouddms.py @@ -21,7 +21,10 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms_resources, + conversionworkspace_resources, +) __protobuf__ = proto.module( package="google.cloud.clouddms.v1", @@ -48,25 +51,50 @@ "CreateConnectionProfileRequest", "UpdateConnectionProfileRequest", "DeleteConnectionProfileRequest", + 
"CreatePrivateConnectionRequest", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", + "DeletePrivateConnectionRequest", + "GetPrivateConnectionRequest", "OperationMetadata", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", + "GetConversionWorkspaceRequest", + "CreateConversionWorkspaceRequest", + "UpdateConversionWorkspaceRequest", + "DeleteConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "RollbackConversionWorkspaceRequest", + "ApplyConversionWorkspaceRequest", + "SeedConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", + "ImportMappingRulesRequest", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", }, ) class ListMigrationJobsRequest(proto.Message): - r"""Retrieve a list of all migration jobs in a given project and + r"""Retrieves a list of all migration jobs in a given project and location. Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of migrationJobs. page_size (int): The maximum number of migration jobs to return. The service may return fewer than this value. If unspecified, at most 50 migration jobs will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. + values above 1000 are coerced to 1000. page_token (str): The nextPageToken value received in the previous call to migrationJobs.list, used in the @@ -123,7 +151,7 @@ class ListMigrationJobsResponse(proto.Message): migration_jobs (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob]): The list of migration jobs objects. next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the + A token which can be sent as ``page_token`` to retrieve the next page. 
If this field is omitted, there are no subsequent pages. unreachable (MutableSequence[str]): @@ -172,7 +200,7 @@ class CreateMigrationJobRequest(proto.Message): Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of migration jobs. migration_job_id (str): Required. The ID of the instance to create. @@ -181,13 +209,13 @@ class CreateMigrationJobRequest(proto.Message): job `__ object. request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. """ @@ -217,19 +245,19 @@ class UpdateMigrationJobRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the - fields to be overwritten in the migration job - resource by the update. + fields to be overwritten by the update in the + conversion workspace resource. migration_job (google.cloud.clouddms_v1.types.MigrationJob): Required. The migration job parameters to update. request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. 
""" @@ -258,13 +286,13 @@ class DeleteMigrationJobRequest(proto.Message): Required. Name of the migration job resource to delete. request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. force (bool): @@ -401,7 +429,7 @@ class GenerateSshScriptRequest(proto.Message): This field is a member of `oneof`_ ``vm_config``. vm_port (int): The port that will be open on the bastion - host + host. """ migration_job: str = proto.Field( @@ -492,14 +520,14 @@ class ListConnectionProfilesRequest(proto.Message): Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. page_size (int): The maximum number of connection profiles to return. The service may return fewer than this value. If unspecified, at most 50 connection profiles will be returned. The maximum value is - 1000; values above 1000 will be coerced to 1000. + 1000; values above 1000 are coerced to 1000. page_token (str): A page token, received from a previous ``ListConnectionProfiles`` call. Provide this to retrieve @@ -521,7 +549,8 @@ class ListConnectionProfilesRequest(proto.Message): %lt;my_username%gt;** to list all connection profiles configured to connect with a specific username. order_by (str): - the order by fields for the result. + A comma-separated list of fields to order + results according to. 
""" parent: str = proto.Field( @@ -553,7 +582,7 @@ class ListConnectionProfilesResponse(proto.Message): connection_profiles (MutableSequence[google.cloud.clouddms_v1.types.ConnectionProfile]): The response list of connection profiles. next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the + A token which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. unreachable (MutableSequence[str]): @@ -601,7 +630,7 @@ class CreateConnectionProfileRequest(proto.Message): Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. connection_profile_id (str): Required. The connection profile identifier. @@ -609,15 +638,24 @@ class CreateConnectionProfileRequest(proto.Message): Required. The create request body including the connection profile data request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't create any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Create the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. 
""" parent: str = proto.Field( @@ -637,6 +675,14 @@ class CreateConnectionProfileRequest(proto.Message): proto.STRING, number=4, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=6, + ) class UpdateConnectionProfileRequest(proto.Message): @@ -645,21 +691,30 @@ class UpdateConnectionProfileRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the - fields to be overwritten in the connection - profile resource by the update. + fields to be overwritten by the update in the + conversion workspace resource. connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): Required. The connection profile parameters to update. request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't update any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Update the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. 
""" update_mask: field_mask_pb2.FieldMask = proto.Field( @@ -676,6 +731,14 @@ class UpdateConnectionProfileRequest(proto.Message): proto.STRING, number=3, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) class DeleteConnectionProfileRequest(proto.Message): @@ -686,13 +749,13 @@ class DeleteConnectionProfileRequest(proto.Message): Required. Name of the connection profile resource to delete. request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. force (bool): @@ -715,6 +778,192 @@ class DeleteConnectionProfileRequest(proto.Message): ) +class CreatePrivateConnectionRequest(proto.Message): + r"""Request message to create a new private connection in the + specified project and region. + + Attributes: + parent (str): + Required. The parent that owns the collection + of PrivateConnections. + private_connection_id (str): + Required. The private connection identifier. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection resource to + create. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + skip_validation (bool): + Optional. 
If set to true, will skip + validations. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + private_connection_id: str = proto.Field( + proto.STRING, + number=2, + ) + private_connection: clouddms_resources.PrivateConnection = proto.Field( + proto.MESSAGE, + number=3, + message=clouddms_resources.PrivateConnection, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListPrivateConnectionsRequest(proto.Message): + r"""Request message to retrieve a list of private connections in + a given project and location. + + Attributes: + parent (str): + Required. The parent that owns the collection + of private connections. + page_size (int): + Maximum number of private connections to + return. If unspecified, at most 50 private + connections that are returned. The maximum value + is 1000; values above 1000 are coerced to 1000. + page_token (str): + Page token received from a previous + ``ListPrivateConnections`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListPrivateConnections`` must match the call that provided + the page token. + filter (str): + A filter expression that filters private connections listed + in the response. The expression must specify the field name, + a comparison operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either =, !=, >, or + <. For example, list private connections created this year + by specifying **createTime %gt; + 2021-01-01T00:00:00.000000000Z**. + order_by (str): + Order by fields for the result. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPrivateConnectionsResponse(proto.Message): + r"""Response message for 'ListPrivateConnections' request. + + Attributes: + private_connections (MutableSequence[google.cloud.clouddms_v1.types.PrivateConnection]): + List of private connections. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + private_connections: MutableSequence[ + clouddms_resources.PrivateConnection + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=clouddms_resources.PrivateConnection, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeletePrivateConnectionRequest(proto.Message): + r"""Request message to delete a private connection. + + Attributes: + name (str): + Required. The name of the private connection + to delete. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPrivateConnectionRequest(proto.Message): + r"""Request message to get a private connection resource. + + Attributes: + name (str): + Required. The name of the private connection + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. @@ -777,4 +1026,713 @@ class OperationMetadata(proto.Message): ) +class ListConversionWorkspacesRequest(proto.Message): + r"""Retrieve a list of all conversion workspaces in a given + project and location. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + page_size (int): + The maximum number of conversion workspaces + to return. The service may return fewer than + this value. If unspecified, at most 50 sets are + returned. + page_token (str): + The nextPageToken value received in the + previous call to conversionWorkspaces.list, used + in the subsequent request to retrieve the next + page of results. On first call this should be + left blank. When paginating, all other + parameters provided to conversionWorkspaces.list + must match the call that provided the page + token. + filter (str): + A filter expression that filters conversion workspaces + listed in the response. The expression must specify the + field name, a comparison operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The comparison operator must be either + =, !=, >, or <. For example, list conversion workspaces + created this year by specifying **createTime %gt; + 2020-01-01T00:00:00.000000000Z.** You can also filter nested + fields. For example, you could specify **source.version = + "12.c.1"** to select all conversion workspaces with source + database version equal to 12.c.1. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListConversionWorkspacesResponse(proto.Message): + r"""Response message for 'ListConversionWorkspaces' request. + + Attributes: + conversion_workspaces (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace objects. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + conversion_workspaces: MutableSequence[ + conversionworkspace_resources.ConversionWorkspace + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetConversionWorkspaceRequest(proto.Message): + r"""Request message for 'GetConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateConversionWorkspaceRequest(proto.Message): + r"""Request message to create a new Conversion Workspace + in the specified project and region. + + Attributes: + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + conversion_workspace_id (str): + Required. The ID of the conversion workspace + to create. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion workspace + object. 
+ request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + conversion_workspace_id: str = proto.Field( + proto.STRING, + number=2, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = ( + proto.Field( + proto.MESSAGE, + number=3, + message=conversionworkspace_resources.ConversionWorkspace, + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateConversionWorkspaceRequest(proto.Message): + r"""Request message for 'UpdateConversionWorkspace' request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the + fields to be overwritten by the update in the + conversion workspace resource. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace parameters + to update. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + conversion_workspace: conversionworkspace_resources.ConversionWorkspace = ( + proto.Field( + proto.MESSAGE, + number=2, + message=conversionworkspace_resources.ConversionWorkspace, + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteConversionWorkspaceRequest(proto.Message): + r"""Request message for 'DeleteConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to delete. + request_id (str): + A unique ID used to identify the request. If the server + receives two requests with the same ID, then the second + request is ignored. + + It is recommended to always set this value to a UUID. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CommitConversionWorkspaceRequest(proto.Message): + r"""Request message for 'CommitConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to commit. + commit_name (str): + Optional. Optional name of the commit. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RollbackConversionWorkspaceRequest(proto.Message): + r"""Request message for 'RollbackConversionWorkspace' request. + + Attributes: + name (str): + Required. Name of the conversion workspace + resource to roll back to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ApplyConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ApplyConversionWorkspace' request. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the conversion workspace resource for + which to apply the draft tree. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + filter (str): + Filter which entities to apply. Leaving this + field empty will apply all of the entities. + Supports Google AIP 160 based filtering. + connection_profile (str): + Fully qualified (Uri) name of the destination + connection profile. + + This field is a member of `oneof`_ ``destination``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof="destination", + ) + + +class SeedConversionWorkspaceRequest(proto.Message): + r"""Request message for 'SeedConversionWorkspace' request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the conversion workspace resource to seed with new + database structure, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Should the conversion workspace be committed + automatically after the seed operation. + source_connection_profile (str): + Fully qualified (Uri) name of the source + connection profile. + + This field is a member of `oneof`_ ``seed_from``. + destination_connection_profile (str): + Fully qualified (Uri) name of the destination + connection profile. + + This field is a member of `oneof`_ ``seed_from``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=2, + ) + source_connection_profile: str = proto.Field( + proto.STRING, + number=100, + oneof="seed_from", + ) + destination_connection_profile: str = proto.Field( + proto.STRING, + number=101, + oneof="seed_from", + ) + + +class ConvertConversionWorkspaceRequest(proto.Message): + r"""Request message for 'ConvertConversionWorkspace' request. + + Attributes: + name (str): + Name of the conversion workspace resource to convert in the + form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + auto_commit (bool): + Specifies whether the conversion workspace is + to be committed automatically after the + conversion. + filter (str): + Filter the entities to convert. Leaving this + field empty will convert all of the entities. + Supports Google AIP-160 style filtering. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ImportMappingRulesRequest(proto.Message): + r"""Request message for 'ImportMappingRules' request. + + Attributes: + parent (str): + Required. Name of the conversion workspace resource to + import the rules to in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + rules_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + The format of the rules content file. + rules_files (MutableSequence[google.cloud.clouddms_v1.types.ImportMappingRulesRequest.RulesFile]): + One or more rules files. + auto_commit (bool): + Should the conversion workspace be committed + automatically after the import operation. + """ + + class RulesFile(proto.Message): + r"""Details of a single rules file. + + Attributes: + rules_source_filename (str): + The filename of the rules that needs to be + converted. 
The filename is used mainly so that + future logs of the import rules job contain it, + and can therefore be searched by it. + rules_content (str): + The text content of the rules that needs to + be converted. + """ + + rules_source_filename: str = proto.Field( + proto.STRING, + number=1, + ) + rules_content: str = proto.Field( + proto.STRING, + number=2, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + rules_format: conversionworkspace_resources.ImportRulesFileFormat = proto.Field( + proto.ENUM, + number=2, + enum=conversionworkspace_resources.ImportRulesFileFormat, + ) + rules_files: MutableSequence[RulesFile] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=RulesFile, + ) + auto_commit: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class DescribeDatabaseEntitiesRequest(proto.Message): + r"""Request message for 'DescribeDatabaseEntities' request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + database entities are described. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + page_size (int): + The maximum number of entities to return. The + service may return fewer entities than the value + specifies. + page_token (str): + The nextPageToken value received in the + previous call to + conversionWorkspace.describeDatabaseEntities, + used in the subsequent request to retrieve the + next page of results. On first call this should + be left blank. When paginating, all other + parameters provided to + conversionWorkspace.describeDatabaseEntities + must match the call that provided the page + token. + tree (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest.DBTreeType): + The tree to fetch. + uncommitted (bool): + Whether to retrieve the latest committed version of the + entities or the latest version. This field is ignored if a + specific commit_id is specified. 
+ commit_id (str): + Request a specific commit ID. If not + specified, the entities from the latest commit + are returned. + filter (str): + Filter the returned entities based on AIP-160 + standard. + """ + + class DBTreeType(proto.Enum): + r"""The type of a tree to return + + Values: + DB_TREE_TYPE_UNSPECIFIED (0): + Unspecified tree type. + SOURCE_TREE (1): + The source database tree. + DRAFT_TREE (2): + The draft database tree. + DESTINATION_TREE (3): + The destination database tree. + """ + DB_TREE_TYPE_UNSPECIFIED = 0 + SOURCE_TREE = 1 + DRAFT_TREE = 2 + DESTINATION_TREE = 3 + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + tree: DBTreeType = proto.Field( + proto.ENUM, + number=6, + enum=DBTreeType, + ) + uncommitted: bool = proto.Field( + proto.BOOL, + number=11, + ) + commit_id: str = proto.Field( + proto.STRING, + number=12, + ) + filter: str = proto.Field( + proto.STRING, + number=13, + ) + + +class DescribeDatabaseEntitiesResponse(proto.Message): + r"""Response message for 'DescribeDatabaseEntities' request. + + Attributes: + database_entities (MutableSequence[google.cloud.clouddms_v1.types.DatabaseEntity]): + The list of database entities for the + conversion workspace. + next_page_token (str): + A token which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + database_entities: MutableSequence[ + conversionworkspace_resources.DatabaseEntity + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.DatabaseEntity, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchBackgroundJobsRequest(proto.Message): + r"""Request message for 'SearchBackgroundJobs' request. 
+ + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + jobs are listed, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + return_most_recent_per_job_type (bool): + Optional. Whether or not to return just the + most recent job per job type, + max_size (int): + Optional. The maximum number of jobs to + return. The service may return fewer than this + value. If unspecified, at most 100 jobs are + returned. The maximum value is 100; values above + 100 are coerced to 100. + completed_until_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. If provided, only returns jobs that + completed until (not including) the given + timestamp. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + return_most_recent_per_job_type: bool = proto.Field( + proto.BOOL, + number=2, + ) + max_size: int = proto.Field( + proto.INT32, + number=3, + ) + completed_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class SearchBackgroundJobsResponse(proto.Message): + r"""Response message for 'SearchBackgroundJobs' request. + + Attributes: + jobs (MutableSequence[google.cloud.clouddms_v1.types.BackgroundJobLogEntry]): + The list of conversion workspace mapping + rules. + """ + + jobs: MutableSequence[ + conversionworkspace_resources.BackgroundJobLogEntry + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.BackgroundJobLogEntry, + ) + + +class DescribeConversionWorkspaceRevisionsRequest(proto.Message): + r"""Request message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + conversion_workspace (str): + Required. Name of the conversion workspace resource whose + revisions are listed. Must be in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. 
+ commit_id (str): + Optional. Optional filter to request a + specific commit ID. + """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DescribeConversionWorkspaceRevisionsResponse(proto.Message): + r"""Response message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + revisions (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace revisions. + """ + + revisions: MutableSequence[ + conversionworkspace_resources.ConversionWorkspace + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + + +class FetchStaticIpsRequest(proto.Message): + r"""Request message for 'FetchStaticIps' request. + + Attributes: + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of IPs to return. + page_token (str): + A page token, received from a previous ``FetchStaticIps`` + call. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchStaticIpsResponse(proto.Message): + r"""Response message for a 'FetchStaticIps' request. + + Attributes: + static_ips (MutableSequence[str]): + List of static IPs. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + static_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/clouddms_v1/types/clouddms_resources.py b/google/cloud/clouddms_v1/types/clouddms_resources.py index bfea88a..1e51da9 100644 --- a/google/cloud/clouddms_v1/types/clouddms_resources.py +++ b/google/cloud/clouddms_v1/types/clouddms_resources.py @@ -26,26 +26,55 @@ __protobuf__ = proto.module( package="google.cloud.clouddms.v1", manifest={ + "NetworkArchitecture", "DatabaseEngine", "DatabaseProvider", "SslConfig", "MySqlConnectionProfile", "PostgreSqlConnectionProfile", + "OracleConnectionProfile", "CloudSqlConnectionProfile", + "AlloyDbConnectionProfile", "SqlAclEntry", "SqlIpConfig", "CloudSqlSettings", + "AlloyDbSettings", "StaticIpConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "VpcPeeringConnectivity", + "ForwardSshTunnelConnectivity", + "StaticServiceIpConnectivity", + "PrivateConnectivity", "DatabaseType", "MigrationJob", + "ConversionWorkspaceInfo", "ConnectionProfile", "MigrationJobVerificationError", + "PrivateConnection", + "VpcPeeringConfig", }, ) +class NetworkArchitecture(proto.Enum): + r""" + + Values: + NETWORK_ARCHITECTURE_UNSPECIFIED (0): + No description available. + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER (1): + Instance is in Cloud SQL's old producer + network architecture. + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER (2): + Instance is in Cloud SQL's new producer + network architecture. + """ + NETWORK_ARCHITECTURE_UNSPECIFIED = 0 + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER = 1 + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER = 2 + + class DatabaseEngine(proto.Enum): r"""The database engine types. @@ -57,10 +86,13 @@ class DatabaseEngine(proto.Enum): The source engine is MySQL. POSTGRESQL (2): The source engine is PostgreSQL. 
+ ORACLE (4): + The source engine is Oracle. """ DATABASE_ENGINE_UNSPECIFIED = 0 MYSQL = 1 POSTGRESQL = 2 + ORACLE = 4 class DatabaseProvider(proto.Enum): @@ -73,10 +105,16 @@ class DatabaseProvider(proto.Enum): CloudSQL runs the database. RDS (2): RDS runs the database. + AURORA (3): + Amazon Aurora. + ALLOYDB (4): + AlloyDB. """ DATABASE_PROVIDER_UNSPECIFIED = 0 CLOUDSQL = 1 RDS = 2 + AURORA = 3 + ALLOYDB = 4 class SslConfig(proto.Message): @@ -209,6 +247,13 @@ class PostgreSqlConnectionProfile(proto.Message): r"""Specifies connection parameters required specifically for PostgreSQL databases. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: host (str): Required. The IP or hostname of the source @@ -238,6 +283,19 @@ class PostgreSqlConnectionProfile(proto.Message): If the source is a Cloud SQL database, use this field to provide the Cloud SQL instance ID of the source. + network_architecture (google.cloud.clouddms_v1.types.NetworkArchitecture): + Output only. If the source is a Cloud SQL + database, this field indicates the network + architecture it's associated with. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + Static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + private_service_connect_connectivity (google.cloud.clouddms_v1.types.PrivateServiceConnectConnectivity): + Private service connect connectivity. + + This field is a member of `oneof`_ ``connectivity``. 
""" host: str = proto.Field( @@ -269,6 +327,119 @@ class PostgreSqlConnectionProfile(proto.Message): proto.STRING, number=7, ) + network_architecture: "NetworkArchitecture" = proto.Field( + proto.ENUM, + number=8, + enum="NetworkArchitecture", + ) + static_ip_connectivity: "StaticIpConnectivity" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="StaticIpConnectivity", + ) + private_service_connect_connectivity: "PrivateServiceConnectConnectivity" = ( + proto.Field( + proto.MESSAGE, + number=101, + oneof="connectivity", + message="PrivateServiceConnectConnectivity", + ) + ) + + +class OracleConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + Oracle databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + Oracle database. + port (int): + Required. The network port of the source + Oracle database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. + password_set (bool): + Output only. Indicates whether a new password + is included in the request. + database_service (str): + Required. Database service for the Oracle + connection. + static_service_ip_connectivity (google.cloud.clouddms_v1.types.StaticServiceIpConnectivity): + Static Service IP connectivity. 
+ + This field is a member of `oneof`_ ``connectivity``. + forward_ssh_connectivity (google.cloud.clouddms_v1.types.ForwardSshTunnelConnectivity): + Forward SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + private_connectivity (google.cloud.clouddms_v1.types.PrivateConnectivity): + Private connectivity. + + This field is a member of `oneof`_ ``connectivity``. + """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_service: str = proto.Field( + proto.STRING, + number=6, + ) + static_service_ip_connectivity: "StaticServiceIpConnectivity" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="StaticServiceIpConnectivity", + ) + forward_ssh_connectivity: "ForwardSshTunnelConnectivity" = proto.Field( + proto.MESSAGE, + number=101, + oneof="connectivity", + message="ForwardSshTunnelConnectivity", + ) + private_connectivity: "PrivateConnectivity" = proto.Field( + proto.MESSAGE, + number=102, + oneof="connectivity", + message="PrivateConnectivity", + ) class CloudSqlConnectionProfile(proto.Message): @@ -289,6 +460,12 @@ class CloudSqlConnectionProfile(proto.Message): public_ip (str): Output only. The Cloud SQL database instance's public IP. + additional_public_ip (str): + Output only. The Cloud SQL database + instance's additional (outgoing) public IP. Used + when the Cloud SQL database availability type is + REGIONAL (i.e. multiple zones / highly + available). 
""" cloud_sql_id: str = proto.Field( @@ -308,6 +485,34 @@ class CloudSqlConnectionProfile(proto.Message): proto.STRING, number=4, ) + additional_public_ip: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AlloyDbConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and the parameters + required to create an AlloyDB destination cluster. + + Attributes: + cluster_id (str): + Required. The AlloyDB cluster ID that this + connection profile is associated with. + settings (google.cloud.clouddms_v1.types.AlloyDbSettings): + Immutable. Metadata used to create the + destination AlloyDB cluster. + """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: "AlloyDbSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="AlloyDbSettings", + ) class SqlAclEntry(proto.Message): @@ -373,6 +578,17 @@ class SqlIpConfig(proto.Message): SQL instance is accessible for private IP. For example, ``projects/myProject/global/networks/default``. This setting can be updated, but it cannot be removed after it is set. + allocated_ip_range (str): + Optional. The name of the allocated IP + address range for the private IP Cloud SQL + instance. This name refers to an already + allocated IP range address. If set, the instance + IP address will be created in the allocated + range. Note that this IP address range can't be + modified after the instance is created. If you + change the VPC when configuring connectivity + settings for the migration job, this field is + not relevant. require_ssl (google.protobuf.wrappers_pb2.BoolValue): Whether SSL connections over IP should be enforced or not. @@ -392,6 +608,10 @@ class SqlIpConfig(proto.Message): proto.STRING, number=2, ) + allocated_ip_range: str = proto.Field( + proto.STRING, + number=5, + ) require_ssl: wrappers_pb2.BoolValue = proto.Field( proto.MESSAGE, number=3, @@ -465,7 +685,13 @@ class CloudSqlSettings(proto.Message): is 10GB. 
zone (str): The Google Cloud Platform zone where your - Cloud SQL datdabse instance is located. + Cloud SQL database instance is located. + secondary_zone (str): + Optional. The Google Cloud Platform zone + where the failover Cloud SQL database instance + is located. Used when the Cloud SQL database + availability type is REGIONAL (i.e. multiple + zones / highly available). source_id (str): The Database Migration Service source connection profile ID, in the format: @@ -478,6 +704,15 @@ class CloudSqlSettings(proto.Message): collation (str): The Cloud SQL default instance level collation. + cmek_key_name (str): + The KMS key name used for the csql instance. + availability_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlAvailabilityType): + Optional. Availability type. Potential values: + + - ``ZONAL``: The instance serves data from only one zone. + Outages in that zone affect data availability. + - ``REGIONAL``: The instance can serve data from more than + one zone in a region (it is highly available). """ class SqlActivationPolicy(proto.Enum): @@ -532,6 +767,8 @@ class SqlDatabaseVersion(proto.Enum): PostgreSQL 12. POSTGRES_13 (8): PostgreSQL 13. + POSTGRES_14 (17): + PostgreSQL 14. """ SQL_DATABASE_VERSION_UNSPECIFIED = 0 MYSQL_5_6 = 1 @@ -542,6 +779,22 @@ class SqlDatabaseVersion(proto.Enum): MYSQL_8_0 = 6 POSTGRES_12 = 7 POSTGRES_13 = 8 + POSTGRES_14 = 17 + + class SqlAvailabilityType(proto.Enum): + r"""The availability type of the given Cloud SQL instance. + + Values: + SQL_AVAILABILITY_TYPE_UNSPECIFIED (0): + This is an unknown Availability type. + ZONAL (1): + Zonal availablility instance. + REGIONAL (2): + Regional availability instance. 
+ """ + SQL_AVAILABILITY_TYPE_UNSPECIFIED = 0 + ZONAL = 1 + REGIONAL = 2 database_version: SqlDatabaseVersion = proto.Field( proto.ENUM, @@ -596,6 +849,10 @@ class SqlDatabaseVersion(proto.Enum): proto.STRING, number=11, ) + secondary_zone: str = proto.Field( + proto.STRING, + number=18, + ) source_id: str = proto.Field( proto.STRING, number=12, @@ -612,16 +869,206 @@ class SqlDatabaseVersion(proto.Enum): proto.STRING, number=15, ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=16, + ) + availability_type: SqlAvailabilityType = proto.Field( + proto.ENUM, + number=17, + enum=SqlAvailabilityType, + ) + + +class AlloyDbSettings(proto.Message): + r"""Settings for creating an AlloyDB cluster. + + Attributes: + initial_user (google.cloud.clouddms_v1.types.AlloyDbSettings.UserPassword): + Required. Input only. Initial user to setup + during cluster creation. Required. + vpc_network (str): + Required. The resource link for the VPC network in which + cluster resources are created and from which they are + accessible via Private IP. The network must belong to the + same project as the cluster. It is specified in the form: + "projects/{project_number}/global/networks/{network_id}". + This is required to create a cluster. + labels (MutableMapping[str, str]): + Labels for the AlloyDB cluster created by + DMS. An object containing a list of 'key', + 'value' pairs. + primary_instance_settings (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings): + + encryption_config (google.cloud.clouddms_v1.types.AlloyDbSettings.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the data disks and other + persistent data resources of a cluster with a + customer-managed encryption key (CMEK). When + this field is not specified, the cluster will + then use default encryption scheme to protect + the user data. + """ + + class UserPassword(proto.Message): + r"""The username/password for a database user. 
Used for + specifying initial users at cluster creation time. + + Attributes: + user (str): + The database username. + password (str): + The initial password for the user. + password_set (bool): + Output only. Indicates if the initial_user.password field + has been set. + """ + + user: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class PrimaryInstanceSettings(proto.Message): + r"""Settings for the cluster's primary instance + + Attributes: + id (str): + Required. The ID of the AlloyDB primary instance. The ID + must satisfy the regex expression "[a-z0-9-]+". + machine_config (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig): + Configuration for the machines that host the + underlying database engine. + database_flags (MutableMapping[str, str]): + Database flags to pass to AlloyDB when DMS is + creating the AlloyDB cluster and instances. See + the AlloyDB documentation for how these can be + used. + labels (MutableMapping[str, str]): + Labels for the AlloyDB primary instance + created by DMS. An object containing a list of + 'key', 'value' pairs. + private_ip (str): + Output only. The private IP address for the + Instance. This is the connection endpoint for an + end-user application. + """ + + class MachineConfig(proto.Message): + r"""MachineConfig describes the configuration of a machine. + + Attributes: + cpu_count (int): + The number of CPU's in the VM instance. 
+ """ + + cpu_count: int = proto.Field( + proto.INT32, + number=1, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + machine_config: "AlloyDbSettings.PrimaryInstanceSettings.MachineConfig" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="AlloyDbSettings.PrimaryInstanceSettings.MachineConfig", + ) + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + private_ip: str = proto.Field( + proto.STRING, + number=8, + ) + + class EncryptionConfig(proto.Message): + r"""EncryptionConfig describes the encryption config of a cluster + that is encrypted with a CMEK (customer-managed encryption key). + + Attributes: + kms_key_name (str): + The fully-qualified resource name of the KMS key. Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + initial_user: UserPassword = proto.Field( + proto.MESSAGE, + number=1, + message=UserPassword, + ) + vpc_network: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + primary_instance_settings: PrimaryInstanceSettings = proto.Field( + proto.MESSAGE, + number=4, + message=PrimaryInstanceSettings, + ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=EncryptionConfig, + ) class StaticIpConnectivity(proto.Message): r"""The source database will allow incoming connections from the - destination database's public IP. You can retrieve the Cloud SQL - instance's public IP from the Cloud SQL console or using Cloud - SQL APIs. No additional configuration is required. + public IP of the destination database. 
You can retrieve the + public IP of the Cloud SQL instance from the Cloud SQL console + or using Cloud SQL APIs. No additional configuration is + required. + + """ + +class PrivateServiceConnectConnectivity(proto.Message): + r"""Private Service Connect connectivity + (https://cloud.google.com/vpc/docs/private-service-connect#service-attachments) + + Attributes: + service_attachment (str): + Required. A service attachment that exposes a database, and + has the following format: + projects/{project}/regions/{region}/serviceAttachments/{service_attachment_name} """ + service_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + class ReverseSshConnectivity(proto.Message): r"""The details needed to configure a reverse SSH tunnel between @@ -685,6 +1132,76 @@ class VpcPeeringConnectivity(proto.Message): ) +class ForwardSshTunnelConnectivity(proto.Message): + r"""Forward SSH Tunnel connectivity. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hostname (str): + Required. Hostname for the SSH tunnel. + username (str): + Required. Username for the SSH tunnel. + port (int): + Port for the SSH tunnel, default value is 22. + password (str): + Input only. SSH password. + + This field is a member of `oneof`_ ``authentication_method``. + private_key (str): + Input only. SSH private key. + + This field is a member of `oneof`_ ``authentication_method``. 
+ """ + + hostname: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + port: int = proto.Field( + proto.INT32, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=100, + oneof="authentication_method", + ) + private_key: str = proto.Field( + proto.STRING, + number=101, + oneof="authentication_method", + ) + + +class StaticServiceIpConnectivity(proto.Message): + r"""Static IP address connectivity configured on service project.""" + + +class PrivateConnectivity(proto.Message): + r"""Private Connectivity. + + Attributes: + private_connection (str): + Required. The resource name (URI) of the + private connection. + """ + + private_connection: str = proto.Field( + proto.STRING, + number=1, + ) + + class DatabaseType(proto.Message): r"""A message defining the database engine and provider. @@ -721,7 +1238,7 @@ class MigrationJob(proto.Message): name (str): The name (URI) of this migration job resource, in the form of: - projects/{project}/locations/{location}/instances/{instance}. + projects/{project}/locations/{location}/migrationJobs/{migrationJob}. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp when the migration job resource was created. A timestamp in RFC3339 @@ -750,7 +1267,11 @@ class MigrationJob(proto.Message): Required. The migration job type. dump_path (str): The path to the dump file in Google Cloud Storage, in the - format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). + format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). This field and + the "dump_flags" field are mutually exclusive. + dump_flags (google.cloud.clouddms_v1.types.MigrationJob.DumpFlags): + The initial dump flags. This field and the "dump_path" field + are mutually exclusive. source (str): Required. The resource name (URI) of the source connection profile. @@ -789,6 +1310,30 @@ class MigrationJob(proto.Message): end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
If the migration job is completed, the time when it was completed. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspaceInfo): + The conversion workspace used by the + migration. + filter (str): + This field can be used to select the entities + to migrate as part of the migration job. It uses + AIP-160 notation to select a subset of the + entities configured on the associated + conversion-workspace. This field should not be + set on migration-jobs that are not associated + with a conversion workspace. + cmek_key_name (str): + The CMEK (customer-managed encryption key) fully qualified + key name used for the migration job. This field supports all + migration jobs types except for: + + - Mysql to Mysql (use the cmek field in the cloudsql + connection profile instead). + - PostrgeSQL to PostgreSQL (use the cmek field in the + cloudsql connection profile instead). + - PostgreSQL to AlloyDB (use the kms_key_name field in the + alloydb connection profile instead). Each Cloud CMEK key + has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] """ class State(proto.Enum): @@ -888,6 +1433,39 @@ class Type(proto.Enum): ONE_TIME = 1 CONTINUOUS = 2 + class DumpFlag(proto.Message): + r"""Dump flag definition. + + Attributes: + name (str): + The name of the flag + value (str): + The value of the flag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class DumpFlags(proto.Message): + r"""Dump flags definition. + + Attributes: + dump_flags (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob.DumpFlag]): + The flags for the initial dump. 
+ """ + + dump_flags: MutableSequence["MigrationJob.DumpFlag"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MigrationJob.DumpFlag", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -930,6 +1508,11 @@ class Type(proto.Enum): proto.STRING, number=9, ) + dump_flags: DumpFlags = proto.Field( + proto.MESSAGE, + number=17, + message=DumpFlags, + ) source: str = proto.Field( proto.STRING, number=10, @@ -981,6 +1564,40 @@ class Type(proto.Enum): number=16, message=timestamp_pb2.Timestamp, ) + conversion_workspace: "ConversionWorkspaceInfo" = proto.Field( + proto.MESSAGE, + number=18, + message="ConversionWorkspaceInfo", + ) + filter: str = proto.Field( + proto.STRING, + number=20, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=21, + ) + + +class ConversionWorkspaceInfo(proto.Message): + r"""A conversion workspace's version. + + Attributes: + name (str): + The resource name (URI) of the conversion + workspace. + commit_id (str): + The commit ID of the conversion workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) class ConnectionProfile(proto.Message): @@ -997,7 +1614,7 @@ class ConnectionProfile(proto.Message): name (str): The name of this connection profile resource in the form of - projects/{project}/locations/{location}/instances/{instance}. + projects/{project}/locations/{location}/connectionProfiles/{connectionProfile}. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp when the resource was created. A timestamp in RFC3339 UTC "Zulu" @@ -1028,10 +1645,18 @@ class ConnectionProfile(proto.Message): postgresql (google.cloud.clouddms_v1.types.PostgreSqlConnectionProfile): A PostgreSQL database connection profile. + This field is a member of `oneof`_ ``connection_profile``. + oracle (google.cloud.clouddms_v1.types.OracleConnectionProfile): + An Oracle database connection profile. 
+ This field is a member of `oneof`_ ``connection_profile``. cloudsql (google.cloud.clouddms_v1.types.CloudSqlConnectionProfile): A CloudSQL database connection profile. + This field is a member of `oneof`_ ``connection_profile``. + alloydb (google.cloud.clouddms_v1.types.AlloyDbConnectionProfile): + An AlloyDB cluster connection profile. + This field is a member of `oneof`_ ``connection_profile``. error (google.rpc.status_pb2.Status): Output only. The error details in case of @@ -1114,12 +1739,24 @@ class State(proto.Enum): oneof="connection_profile", message="PostgreSqlConnectionProfile", ) + oracle: "OracleConnectionProfile" = proto.Field( + proto.MESSAGE, + number=104, + oneof="connection_profile", + message="OracleConnectionProfile", + ) cloudsql: "CloudSqlConnectionProfile" = proto.Field( proto.MESSAGE, number=102, oneof="connection_profile", message="CloudSqlConnectionProfile", ) + alloydb: "AlloyDbConnectionProfile" = proto.Field( + proto.MESSAGE, + number=105, + oneof="connection_profile", + message="AlloyDbConnectionProfile", + ) error: status_pb2.Status = proto.Field( proto.MESSAGE, number=7, @@ -1205,6 +1842,20 @@ class ErrorCode(proto.Enum): CANT_RESTART_RUNNING_MIGRATION (21): Migration is already running at the time of restart request. + TABLES_WITH_LIMITED_SUPPORT (24): + The source has tables with limited support. + E.g. PostgreSQL tables without primary keys. + UNSUPPORTED_DATABASE_LOCALE (25): + The source uses an unsupported locale. + UNSUPPORTED_DATABASE_FDW_CONFIG (26): + The source uses an unsupported Foreign Data + Wrapper configuration. + ERROR_RDBMS (27): + There was an underlying RDBMS error. + SOURCE_SIZE_EXCEEDS_THRESHOLD (28): + The source DB size in Bytes exceeds a certain + threshold. The migration might require an + increase of quota, or might not be supported. 
""" ERROR_CODE_UNSPECIFIED = 0 CONNECTION_FAILURE = 1 @@ -1226,6 +1877,11 @@ class ErrorCode(proto.Enum): UNSUPPORTED_TABLE_DEFINITION = 18 UNSUPPORTED_DEFINER = 19 CANT_RESTART_RUNNING_MIGRATION = 21 + TABLES_WITH_LIMITED_SUPPORT = 24 + UNSUPPORTED_DATABASE_LOCALE = 25 + UNSUPPORTED_DATABASE_FDW_CONFIG = 26 + ERROR_RDBMS = 27 + SOURCE_SIZE_EXCEEDS_THRESHOLD = 28 error_code: ErrorCode = proto.Field( proto.ENUM, @@ -1242,4 +1898,135 @@ class ErrorCode(proto.Enum): ) +class PrivateConnection(proto.Message): + r"""The PrivateConnection resource is used to establish private + connectivity with the customer's network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of the resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of the resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update time of the + resource. + labels (MutableMapping[str, str]): + The resource labels for private connections to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The private connection display name. + state (google.cloud.clouddms_v1.types.PrivateConnection.State): + Output only. The state of the private + connection. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + vpc_peering_config (google.cloud.clouddms_v1.types.VpcPeeringConfig): + VPC peering configuration. + + This field is a member of `oneof`_ ``connectivity``. + """ + + class State(proto.Enum): + r"""Private Connection state. + + Values: + STATE_UNSPECIFIED (0): + No description available. + CREATING (1): + The private connection is in creation state - + creating resources. 
+ CREATED (2): + The private connection has been created with + all of its resources. + FAILED (3): + The private connection creation has failed. + DELETING (4): + The private connection is being deleted. + FAILED_TO_DELETE (5): + Delete request has failed, resource is in + invalid state. + DELETED (6): + The private connection has been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + CREATED = 2 + FAILED = 3 + DELETING = 4 + FAILED_TO_DELETE = 5 + DELETED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + vpc_peering_config: "VpcPeeringConfig" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="VpcPeeringConfig", + ) + + +class VpcPeeringConfig(proto.Message): + r"""The VPC peering configuration is used to create VPC peering + with the consumer's VPC. + + Attributes: + vpc_name (str): + Required. Fully qualified name of the VPC + that Database Migration Service will peer to. + subnet (str): + Required. A free subnet for peering. 
(CIDR of + /29) + """ + + vpc_name: str = proto.Field( + proto.STRING, + number=1, + ) + subnet: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py b/google/cloud/clouddms_v1/types/conversionworkspace_resources.py similarity index 91% rename from owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py rename to google/cloud/clouddms_v1/types/conversionworkspace_resources.py index 760ba47..581f67c 100644 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/conversionworkspace_resources.py +++ b/google/cloud/clouddms_v1/types/conversionworkspace_resources.py @@ -17,37 +17,36 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.clouddms_v1.types import clouddms_resources from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.clouddms_v1.types import clouddms_resources __protobuf__ = proto.module( - package='google.cloud.clouddms.v1', + package="google.cloud.clouddms.v1", manifest={ - 'DatabaseEntityType', - 'BackgroundJobType', - 'ImportRulesFileFormat', - 'DatabaseEngineInfo', - 'ConversionWorkspace', - 'BackgroundJobLogEntry', - 'DatabaseEntity', - 'SchemaEntity', - 'TableEntity', - 'ColumnEntity', - 'ConstraintEntity', - 'IndexEntity', - 'TriggerEntity', - 'ViewEntity', - 'SequenceEntity', - 'StoredProcedureEntity', - 'FunctionEntity', - 'SynonymEntity', - 'PackageEntity', - 'EntityMapping', - 'EntityMappingLogEntry', + "DatabaseEntityType", + "BackgroundJobType", + "ImportRulesFileFormat", + "DatabaseEngineInfo", + "ConversionWorkspace", + "BackgroundJobLogEntry", + "DatabaseEntity", + "SchemaEntity", + "TableEntity", + "ColumnEntity", + "ConstraintEntity", + "IndexEntity", + "TriggerEntity", + "ViewEntity", + "SequenceEntity", + 
"StoredProcedureEntity", + "FunctionEntity", + "SynonymEntity", + "PackageEntity", + "EntityMapping", + "EntityMappingLogEntry", }, ) @@ -212,15 +211,15 @@ class ConversionWorkspace(proto.Message): proto.STRING, number=1, ) - source: 'DatabaseEngineInfo' = proto.Field( + source: "DatabaseEngineInfo" = proto.Field( proto.MESSAGE, number=2, - message='DatabaseEngineInfo', + message="DatabaseEngineInfo", ) - destination: 'DatabaseEngineInfo' = proto.Field( + destination: "DatabaseEngineInfo" = proto.Field( proto.MESSAGE, number=3, - message='DatabaseEngineInfo', + message="DatabaseEngineInfo", ) global_settings: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -306,6 +305,7 @@ class BackgroundJobLogEntry(proto.Message): This field is a member of `oneof`_ ``job_details``. """ + class JobCompletionState(proto.Enum): r"""Final state after a job completes. @@ -350,10 +350,10 @@ class ImportRulesJobDetails(proto.Message): proto.STRING, number=1, ) - file_format: 'ImportRulesFileFormat' = proto.Field( + file_format: "ImportRulesFileFormat" = proto.Field( proto.ENUM, number=2, - enum='ImportRulesFileFormat', + enum="ImportRulesFileFormat", ) class ConvertJobDetails(proto.Message): @@ -395,10 +395,10 @@ class ApplyJobDetails(proto.Message): proto.STRING, number=1, ) - job_type: 'BackgroundJobType' = proto.Field( + job_type: "BackgroundJobType" = proto.Field( proto.ENUM, number=2, - enum='BackgroundJobType', + enum="BackgroundJobType", ) start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -426,25 +426,25 @@ class ApplyJobDetails(proto.Message): seed_job_details: SeedJobDetails = proto.Field( proto.MESSAGE, number=100, - oneof='job_details', + oneof="job_details", message=SeedJobDetails, ) import_rules_job_details: ImportRulesJobDetails = proto.Field( proto.MESSAGE, number=101, - oneof='job_details', + oneof="job_details", message=ImportRulesJobDetails, ) convert_job_details: ConvertJobDetails = proto.Field( proto.MESSAGE, number=102, - 
oneof='job_details', + oneof="job_details", message=ConvertJobDetails, ) apply_job_details: ApplyJobDetails = proto.Field( proto.MESSAGE, number=103, - oneof='job_details', + oneof="job_details", message=ApplyJobDetails, ) @@ -516,6 +516,7 @@ class DatabaseEntity(proto.Message): This field is a member of `oneof`_ ``entity_body``. """ + class TreeType(proto.Enum): r"""The type of database entities tree. @@ -550,63 +551,63 @@ class TreeType(proto.Enum): number=3, enum=TreeType, ) - entity_type: 'DatabaseEntityType' = proto.Field( + entity_type: "DatabaseEntityType" = proto.Field( proto.ENUM, number=4, - enum='DatabaseEntityType', + enum="DatabaseEntityType", ) - mappings: MutableSequence['EntityMapping'] = proto.RepeatedField( + mappings: MutableSequence["EntityMapping"] = proto.RepeatedField( proto.MESSAGE, number=5, - message='EntityMapping', + message="EntityMapping", ) - schema: 'SchemaEntity' = proto.Field( + schema: "SchemaEntity" = proto.Field( proto.MESSAGE, number=102, - oneof='entity_body', - message='SchemaEntity', + oneof="entity_body", + message="SchemaEntity", ) - table: 'TableEntity' = proto.Field( + table: "TableEntity" = proto.Field( proto.MESSAGE, number=103, - oneof='entity_body', - message='TableEntity', + oneof="entity_body", + message="TableEntity", ) - view: 'ViewEntity' = proto.Field( + view: "ViewEntity" = proto.Field( proto.MESSAGE, number=104, - oneof='entity_body', - message='ViewEntity', + oneof="entity_body", + message="ViewEntity", ) - sequence: 'SequenceEntity' = proto.Field( + sequence: "SequenceEntity" = proto.Field( proto.MESSAGE, number=105, - oneof='entity_body', - message='SequenceEntity', + oneof="entity_body", + message="SequenceEntity", ) - stored_procedure: 'StoredProcedureEntity' = proto.Field( + stored_procedure: "StoredProcedureEntity" = proto.Field( proto.MESSAGE, number=106, - oneof='entity_body', - message='StoredProcedureEntity', + oneof="entity_body", + message="StoredProcedureEntity", ) - database_function: 
'FunctionEntity' = proto.Field( + database_function: "FunctionEntity" = proto.Field( proto.MESSAGE, number=107, - oneof='entity_body', - message='FunctionEntity', + oneof="entity_body", + message="FunctionEntity", ) - synonym: 'SynonymEntity' = proto.Field( + synonym: "SynonymEntity" = proto.Field( proto.MESSAGE, number=108, - oneof='entity_body', - message='SynonymEntity', + oneof="entity_body", + message="SynonymEntity", ) - database_package: 'PackageEntity' = proto.Field( + database_package: "PackageEntity" = proto.Field( proto.MESSAGE, number=109, - oneof='entity_body', - message='PackageEntity', + oneof="entity_body", + message="PackageEntity", ) @@ -648,25 +649,25 @@ class TableEntity(proto.Message): Comment associated with the table. """ - columns: MutableSequence['ColumnEntity'] = proto.RepeatedField( + columns: MutableSequence["ColumnEntity"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='ColumnEntity', + message="ColumnEntity", ) - constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + constraints: MutableSequence["ConstraintEntity"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='ConstraintEntity', + message="ConstraintEntity", ) - indices: MutableSequence['IndexEntity'] = proto.RepeatedField( + indices: MutableSequence["IndexEntity"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='IndexEntity', + message="IndexEntity", ) - triggers: MutableSequence['TriggerEntity'] = proto.RepeatedField( + triggers: MutableSequence["TriggerEntity"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='TriggerEntity', + message="TriggerEntity", ) custom_features: struct_pb2.Struct = proto.Field( proto.MESSAGE, @@ -973,10 +974,10 @@ class ViewEntity(proto.Message): number=2, message=struct_pb2.Struct, ) - constraints: MutableSequence['ConstraintEntity'] = proto.RepeatedField( + constraints: MutableSequence["ConstraintEntity"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='ConstraintEntity', + 
message="ConstraintEntity", ) @@ -1098,10 +1099,10 @@ class SynonymEntity(proto.Message): proto.STRING, number=1, ) - source_type: 'DatabaseEntityType' = proto.Field( + source_type: "DatabaseEntityType" = proto.Field( proto.ENUM, number=2, - enum='DatabaseEntityType', + enum="DatabaseEntityType", ) custom_features: struct_pb2.Struct = proto.Field( proto.MESSAGE, @@ -1175,20 +1176,20 @@ class EntityMapping(proto.Message): proto.STRING, number=2, ) - source_type: 'DatabaseEntityType' = proto.Field( + source_type: "DatabaseEntityType" = proto.Field( proto.ENUM, number=4, - enum='DatabaseEntityType', + enum="DatabaseEntityType", ) - draft_type: 'DatabaseEntityType' = proto.Field( + draft_type: "DatabaseEntityType" = proto.Field( proto.ENUM, number=5, - enum='DatabaseEntityType', + enum="DatabaseEntityType", ) - mapping_log: MutableSequence['EntityMappingLogEntry'] = proto.RepeatedField( + mapping_log: MutableSequence["EntityMappingLogEntry"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='EntityMappingLogEntry', + message="EntityMappingLogEntry", ) diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index 437b0aa..0000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/clouddms/__init__.py - google/cloud/clouddms/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4..0000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index b318e50..0000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/clouddms *.py -recursive-include google/cloud/clouddms_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 94ac6f5..0000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Clouddms API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Clouddms API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst b/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst deleted file mode 100644 index 86f0b88..0000000 --- a/owl-bot-staging/v1/docs/clouddms_v1/data_migration_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataMigrationService --------------------------------------- - -.. automodule:: google.cloud.clouddms_v1.services.data_migration_service - :members: - :inherited-members: - -.. automodule:: google.cloud.clouddms_v1.services.data_migration_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/clouddms_v1/services.rst b/owl-bot-staging/v1/docs/clouddms_v1/services.rst deleted file mode 100644 index 89359f2..0000000 --- a/owl-bot-staging/v1/docs/clouddms_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Clouddms v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - data_migration_service diff --git a/owl-bot-staging/v1/docs/clouddms_v1/types.rst b/owl-bot-staging/v1/docs/clouddms_v1/types.rst deleted file mode 100644 index 26b87db..0000000 --- a/owl-bot-staging/v1/docs/clouddms_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Clouddms v1 API -====================================== - -.. 
automodule:: google.cloud.clouddms_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 9bc1929..0000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dms documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dms" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dms-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dms.tex", - u"google-cloud-dms Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dms", - u"Google Cloud Clouddms Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dms", - u"google-cloud-dms Documentation", - author, - "google-cloud-dms", - "GAPIC library for Google Cloud Clouddms API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 83f0cba..0000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - clouddms_v1/services - clouddms_v1/types diff --git a/owl-bot-staging/v1/google/cloud/clouddms/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms/__init__.py deleted file mode 100644 index 0850ec8..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms/__init__.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.clouddms import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.clouddms_v1.services.data_migration_service.client import DataMigrationServiceClient -from google.cloud.clouddms_v1.services.data_migration_service.async_client import DataMigrationServiceAsyncClient - -from google.cloud.clouddms_v1.types.clouddms import ApplyConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import CommitConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import ConvertConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import CreateConnectionProfileRequest -from google.cloud.clouddms_v1.types.clouddms import CreateConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import CreateMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import CreatePrivateConnectionRequest -from google.cloud.clouddms_v1.types.clouddms import DeleteConnectionProfileRequest -from google.cloud.clouddms_v1.types.clouddms import DeleteConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import DeleteMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import DeletePrivateConnectionRequest -from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsRequest -from google.cloud.clouddms_v1.types.clouddms import DescribeConversionWorkspaceRevisionsResponse -from google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesRequest -from 
google.cloud.clouddms_v1.types.clouddms import DescribeDatabaseEntitiesResponse -from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsRequest -from google.cloud.clouddms_v1.types.clouddms import FetchStaticIpsResponse -from google.cloud.clouddms_v1.types.clouddms import GenerateSshScriptRequest -from google.cloud.clouddms_v1.types.clouddms import GetConnectionProfileRequest -from google.cloud.clouddms_v1.types.clouddms import GetConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import GetMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import GetPrivateConnectionRequest -from google.cloud.clouddms_v1.types.clouddms import ImportMappingRulesRequest -from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesRequest -from google.cloud.clouddms_v1.types.clouddms import ListConnectionProfilesResponse -from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesRequest -from google.cloud.clouddms_v1.types.clouddms import ListConversionWorkspacesResponse -from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsRequest -from google.cloud.clouddms_v1.types.clouddms import ListMigrationJobsResponse -from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsRequest -from google.cloud.clouddms_v1.types.clouddms import ListPrivateConnectionsResponse -from google.cloud.clouddms_v1.types.clouddms import OperationMetadata -from google.cloud.clouddms_v1.types.clouddms import PromoteMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import RestartMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import ResumeMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import RollbackConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsRequest -from google.cloud.clouddms_v1.types.clouddms import SearchBackgroundJobsResponse -from google.cloud.clouddms_v1.types.clouddms import 
SeedConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import SshScript -from google.cloud.clouddms_v1.types.clouddms import StartMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import StopMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import UpdateConnectionProfileRequest -from google.cloud.clouddms_v1.types.clouddms import UpdateConversionWorkspaceRequest -from google.cloud.clouddms_v1.types.clouddms import UpdateMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import VerifyMigrationJobRequest -from google.cloud.clouddms_v1.types.clouddms import VmCreationConfig -from google.cloud.clouddms_v1.types.clouddms import VmSelectionConfig -from google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import AlloyDbSettings -from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import CloudSqlSettings -from google.cloud.clouddms_v1.types.clouddms_resources import ConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import ConversionWorkspaceInfo -from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseType -from google.cloud.clouddms_v1.types.clouddms_resources import ForwardSshTunnelConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJob -from google.cloud.clouddms_v1.types.clouddms_resources import MigrationJobVerificationError -from google.cloud.clouddms_v1.types.clouddms_resources import MySqlConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import OracleConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import PostgreSqlConnectionProfile -from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnection -from google.cloud.clouddms_v1.types.clouddms_resources import PrivateConnectivity -from 
google.cloud.clouddms_v1.types.clouddms_resources import PrivateServiceConnectConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import ReverseSshConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import SqlAclEntry -from google.cloud.clouddms_v1.types.clouddms_resources import SqlIpConfig -from google.cloud.clouddms_v1.types.clouddms_resources import SslConfig -from google.cloud.clouddms_v1.types.clouddms_resources import StaticIpConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import StaticServiceIpConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConfig -from google.cloud.clouddms_v1.types.clouddms_resources import VpcPeeringConnectivity -from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseEngine -from google.cloud.clouddms_v1.types.clouddms_resources import DatabaseProvider -from google.cloud.clouddms_v1.types.clouddms_resources import NetworkArchitecture -from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobLogEntry -from google.cloud.clouddms_v1.types.conversionworkspace_resources import ColumnEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConstraintEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import ConversionWorkspace -from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEngineInfo -from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMapping -from google.cloud.clouddms_v1.types.conversionworkspace_resources import EntityMappingLogEntry -from google.cloud.clouddms_v1.types.conversionworkspace_resources import FunctionEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import IndexEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import PackageEntity -from 
google.cloud.clouddms_v1.types.conversionworkspace_resources import SchemaEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import SequenceEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import StoredProcedureEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import SynonymEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import TableEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import TriggerEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import ViewEntity -from google.cloud.clouddms_v1.types.conversionworkspace_resources import BackgroundJobType -from google.cloud.clouddms_v1.types.conversionworkspace_resources import DatabaseEntityType -from google.cloud.clouddms_v1.types.conversionworkspace_resources import ImportRulesFileFormat - -__all__ = ('DataMigrationServiceClient', - 'DataMigrationServiceAsyncClient', - 'ApplyConversionWorkspaceRequest', - 'CommitConversionWorkspaceRequest', - 'ConvertConversionWorkspaceRequest', - 'CreateConnectionProfileRequest', - 'CreateConversionWorkspaceRequest', - 'CreateMigrationJobRequest', - 'CreatePrivateConnectionRequest', - 'DeleteConnectionProfileRequest', - 'DeleteConversionWorkspaceRequest', - 'DeleteMigrationJobRequest', - 'DeletePrivateConnectionRequest', - 'DescribeConversionWorkspaceRevisionsRequest', - 'DescribeConversionWorkspaceRevisionsResponse', - 'DescribeDatabaseEntitiesRequest', - 'DescribeDatabaseEntitiesResponse', - 'FetchStaticIpsRequest', - 'FetchStaticIpsResponse', - 'GenerateSshScriptRequest', - 'GetConnectionProfileRequest', - 'GetConversionWorkspaceRequest', - 'GetMigrationJobRequest', - 'GetPrivateConnectionRequest', - 'ImportMappingRulesRequest', - 'ListConnectionProfilesRequest', - 'ListConnectionProfilesResponse', - 'ListConversionWorkspacesRequest', - 'ListConversionWorkspacesResponse', - 'ListMigrationJobsRequest', - 'ListMigrationJobsResponse', - 
'ListPrivateConnectionsRequest', - 'ListPrivateConnectionsResponse', - 'OperationMetadata', - 'PromoteMigrationJobRequest', - 'RestartMigrationJobRequest', - 'ResumeMigrationJobRequest', - 'RollbackConversionWorkspaceRequest', - 'SearchBackgroundJobsRequest', - 'SearchBackgroundJobsResponse', - 'SeedConversionWorkspaceRequest', - 'SshScript', - 'StartMigrationJobRequest', - 'StopMigrationJobRequest', - 'UpdateConnectionProfileRequest', - 'UpdateConversionWorkspaceRequest', - 'UpdateMigrationJobRequest', - 'VerifyMigrationJobRequest', - 'VmCreationConfig', - 'VmSelectionConfig', - 'AlloyDbConnectionProfile', - 'AlloyDbSettings', - 'CloudSqlConnectionProfile', - 'CloudSqlSettings', - 'ConnectionProfile', - 'ConversionWorkspaceInfo', - 'DatabaseType', - 'ForwardSshTunnelConnectivity', - 'MigrationJob', - 'MigrationJobVerificationError', - 'MySqlConnectionProfile', - 'OracleConnectionProfile', - 'PostgreSqlConnectionProfile', - 'PrivateConnection', - 'PrivateConnectivity', - 'PrivateServiceConnectConnectivity', - 'ReverseSshConnectivity', - 'SqlAclEntry', - 'SqlIpConfig', - 'SslConfig', - 'StaticIpConnectivity', - 'StaticServiceIpConnectivity', - 'VpcPeeringConfig', - 'VpcPeeringConnectivity', - 'DatabaseEngine', - 'DatabaseProvider', - 'NetworkArchitecture', - 'BackgroundJobLogEntry', - 'ColumnEntity', - 'ConstraintEntity', - 'ConversionWorkspace', - 'DatabaseEngineInfo', - 'DatabaseEntity', - 'EntityMapping', - 'EntityMappingLogEntry', - 'FunctionEntity', - 'IndexEntity', - 'PackageEntity', - 'SchemaEntity', - 'SequenceEntity', - 'StoredProcedureEntity', - 'SynonymEntity', - 'TableEntity', - 'TriggerEntity', - 'ViewEntity', - 'BackgroundJobType', - 'DatabaseEntityType', - 'ImportRulesFileFormat', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py b/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms/gapic_version.py +++ /dev/null @@ -1,16 +0,0 
@@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/clouddms/py.typed b/owl-bot-staging/v1/google/cloud/clouddms/py.typed deleted file mode 100644 index d368a62..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py deleted file mode 100644 index 0656b9b..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/__init__.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.clouddms_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_migration_service import DataMigrationServiceClient -from .services.data_migration_service import DataMigrationServiceAsyncClient - -from .types.clouddms import ApplyConversionWorkspaceRequest -from .types.clouddms import CommitConversionWorkspaceRequest -from .types.clouddms import ConvertConversionWorkspaceRequest -from .types.clouddms import CreateConnectionProfileRequest -from .types.clouddms import CreateConversionWorkspaceRequest -from .types.clouddms import CreateMigrationJobRequest -from .types.clouddms import CreatePrivateConnectionRequest -from .types.clouddms import DeleteConnectionProfileRequest -from .types.clouddms import DeleteConversionWorkspaceRequest -from .types.clouddms import DeleteMigrationJobRequest -from .types.clouddms import DeletePrivateConnectionRequest -from .types.clouddms import DescribeConversionWorkspaceRevisionsRequest -from .types.clouddms import DescribeConversionWorkspaceRevisionsResponse -from .types.clouddms import DescribeDatabaseEntitiesRequest -from .types.clouddms import DescribeDatabaseEntitiesResponse -from .types.clouddms import FetchStaticIpsRequest -from .types.clouddms import FetchStaticIpsResponse -from .types.clouddms import GenerateSshScriptRequest -from .types.clouddms import GetConnectionProfileRequest -from .types.clouddms import GetConversionWorkspaceRequest -from .types.clouddms import GetMigrationJobRequest -from .types.clouddms import GetPrivateConnectionRequest -from .types.clouddms import ImportMappingRulesRequest -from .types.clouddms import ListConnectionProfilesRequest -from .types.clouddms import ListConnectionProfilesResponse -from .types.clouddms import ListConversionWorkspacesRequest -from .types.clouddms import ListConversionWorkspacesResponse -from .types.clouddms import ListMigrationJobsRequest -from .types.clouddms import ListMigrationJobsResponse -from 
.types.clouddms import ListPrivateConnectionsRequest -from .types.clouddms import ListPrivateConnectionsResponse -from .types.clouddms import OperationMetadata -from .types.clouddms import PromoteMigrationJobRequest -from .types.clouddms import RestartMigrationJobRequest -from .types.clouddms import ResumeMigrationJobRequest -from .types.clouddms import RollbackConversionWorkspaceRequest -from .types.clouddms import SearchBackgroundJobsRequest -from .types.clouddms import SearchBackgroundJobsResponse -from .types.clouddms import SeedConversionWorkspaceRequest -from .types.clouddms import SshScript -from .types.clouddms import StartMigrationJobRequest -from .types.clouddms import StopMigrationJobRequest -from .types.clouddms import UpdateConnectionProfileRequest -from .types.clouddms import UpdateConversionWorkspaceRequest -from .types.clouddms import UpdateMigrationJobRequest -from .types.clouddms import VerifyMigrationJobRequest -from .types.clouddms import VmCreationConfig -from .types.clouddms import VmSelectionConfig -from .types.clouddms_resources import AlloyDbConnectionProfile -from .types.clouddms_resources import AlloyDbSettings -from .types.clouddms_resources import CloudSqlConnectionProfile -from .types.clouddms_resources import CloudSqlSettings -from .types.clouddms_resources import ConnectionProfile -from .types.clouddms_resources import ConversionWorkspaceInfo -from .types.clouddms_resources import DatabaseType -from .types.clouddms_resources import ForwardSshTunnelConnectivity -from .types.clouddms_resources import MigrationJob -from .types.clouddms_resources import MigrationJobVerificationError -from .types.clouddms_resources import MySqlConnectionProfile -from .types.clouddms_resources import OracleConnectionProfile -from .types.clouddms_resources import PostgreSqlConnectionProfile -from .types.clouddms_resources import PrivateConnection -from .types.clouddms_resources import PrivateConnectivity -from .types.clouddms_resources import 
PrivateServiceConnectConnectivity -from .types.clouddms_resources import ReverseSshConnectivity -from .types.clouddms_resources import SqlAclEntry -from .types.clouddms_resources import SqlIpConfig -from .types.clouddms_resources import SslConfig -from .types.clouddms_resources import StaticIpConnectivity -from .types.clouddms_resources import StaticServiceIpConnectivity -from .types.clouddms_resources import VpcPeeringConfig -from .types.clouddms_resources import VpcPeeringConnectivity -from .types.clouddms_resources import DatabaseEngine -from .types.clouddms_resources import DatabaseProvider -from .types.clouddms_resources import NetworkArchitecture -from .types.conversionworkspace_resources import BackgroundJobLogEntry -from .types.conversionworkspace_resources import ColumnEntity -from .types.conversionworkspace_resources import ConstraintEntity -from .types.conversionworkspace_resources import ConversionWorkspace -from .types.conversionworkspace_resources import DatabaseEngineInfo -from .types.conversionworkspace_resources import DatabaseEntity -from .types.conversionworkspace_resources import EntityMapping -from .types.conversionworkspace_resources import EntityMappingLogEntry -from .types.conversionworkspace_resources import FunctionEntity -from .types.conversionworkspace_resources import IndexEntity -from .types.conversionworkspace_resources import PackageEntity -from .types.conversionworkspace_resources import SchemaEntity -from .types.conversionworkspace_resources import SequenceEntity -from .types.conversionworkspace_resources import StoredProcedureEntity -from .types.conversionworkspace_resources import SynonymEntity -from .types.conversionworkspace_resources import TableEntity -from .types.conversionworkspace_resources import TriggerEntity -from .types.conversionworkspace_resources import ViewEntity -from .types.conversionworkspace_resources import BackgroundJobType -from .types.conversionworkspace_resources import DatabaseEntityType -from 
.types.conversionworkspace_resources import ImportRulesFileFormat - -__all__ = ( - 'DataMigrationServiceAsyncClient', -'AlloyDbConnectionProfile', -'AlloyDbSettings', -'ApplyConversionWorkspaceRequest', -'BackgroundJobLogEntry', -'BackgroundJobType', -'CloudSqlConnectionProfile', -'CloudSqlSettings', -'ColumnEntity', -'CommitConversionWorkspaceRequest', -'ConnectionProfile', -'ConstraintEntity', -'ConversionWorkspace', -'ConversionWorkspaceInfo', -'ConvertConversionWorkspaceRequest', -'CreateConnectionProfileRequest', -'CreateConversionWorkspaceRequest', -'CreateMigrationJobRequest', -'CreatePrivateConnectionRequest', -'DataMigrationServiceClient', -'DatabaseEngine', -'DatabaseEngineInfo', -'DatabaseEntity', -'DatabaseEntityType', -'DatabaseProvider', -'DatabaseType', -'DeleteConnectionProfileRequest', -'DeleteConversionWorkspaceRequest', -'DeleteMigrationJobRequest', -'DeletePrivateConnectionRequest', -'DescribeConversionWorkspaceRevisionsRequest', -'DescribeConversionWorkspaceRevisionsResponse', -'DescribeDatabaseEntitiesRequest', -'DescribeDatabaseEntitiesResponse', -'EntityMapping', -'EntityMappingLogEntry', -'FetchStaticIpsRequest', -'FetchStaticIpsResponse', -'ForwardSshTunnelConnectivity', -'FunctionEntity', -'GenerateSshScriptRequest', -'GetConnectionProfileRequest', -'GetConversionWorkspaceRequest', -'GetMigrationJobRequest', -'GetPrivateConnectionRequest', -'ImportMappingRulesRequest', -'ImportRulesFileFormat', -'IndexEntity', -'ListConnectionProfilesRequest', -'ListConnectionProfilesResponse', -'ListConversionWorkspacesRequest', -'ListConversionWorkspacesResponse', -'ListMigrationJobsRequest', -'ListMigrationJobsResponse', -'ListPrivateConnectionsRequest', -'ListPrivateConnectionsResponse', -'MigrationJob', -'MigrationJobVerificationError', -'MySqlConnectionProfile', -'NetworkArchitecture', -'OperationMetadata', -'OracleConnectionProfile', -'PackageEntity', -'PostgreSqlConnectionProfile', -'PrivateConnection', -'PrivateConnectivity', 
-'PrivateServiceConnectConnectivity', -'PromoteMigrationJobRequest', -'RestartMigrationJobRequest', -'ResumeMigrationJobRequest', -'ReverseSshConnectivity', -'RollbackConversionWorkspaceRequest', -'SchemaEntity', -'SearchBackgroundJobsRequest', -'SearchBackgroundJobsResponse', -'SeedConversionWorkspaceRequest', -'SequenceEntity', -'SqlAclEntry', -'SqlIpConfig', -'SshScript', -'SslConfig', -'StartMigrationJobRequest', -'StaticIpConnectivity', -'StaticServiceIpConnectivity', -'StopMigrationJobRequest', -'StoredProcedureEntity', -'SynonymEntity', -'TableEntity', -'TriggerEntity', -'UpdateConnectionProfileRequest', -'UpdateConversionWorkspaceRequest', -'UpdateMigrationJobRequest', -'VerifyMigrationJobRequest', -'ViewEntity', -'VmCreationConfig', -'VmSelectionConfig', -'VpcPeeringConfig', -'VpcPeeringConnectivity', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json deleted file mode 100644 index a1d6f43..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_metadata.json +++ /dev/null @@ -1,383 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.clouddms_v1", - "protoPackage": "google.cloud.clouddms.v1", - "schema": "1.0", - "services": { - "DataMigrationService": { - "clients": { - "grpc": { - "libraryClient": "DataMigrationServiceClient", - "rpcs": { - "ApplyConversionWorkspace": { - "methods": [ - "apply_conversion_workspace" - ] - }, - "CommitConversionWorkspace": { - "methods": [ - "commit_conversion_workspace" - ] - }, - "ConvertConversionWorkspace": { - "methods": [ - "convert_conversion_workspace" - ] - }, - "CreateConnectionProfile": { - "methods": [ - "create_connection_profile" - ] - }, - "CreateConversionWorkspace": { - "methods": [ - "create_conversion_workspace" - ] - }, - "CreateMigrationJob": { - "methods": [ - "create_migration_job" - ] - 
}, - "CreatePrivateConnection": { - "methods": [ - "create_private_connection" - ] - }, - "DeleteConnectionProfile": { - "methods": [ - "delete_connection_profile" - ] - }, - "DeleteConversionWorkspace": { - "methods": [ - "delete_conversion_workspace" - ] - }, - "DeleteMigrationJob": { - "methods": [ - "delete_migration_job" - ] - }, - "DeletePrivateConnection": { - "methods": [ - "delete_private_connection" - ] - }, - "DescribeConversionWorkspaceRevisions": { - "methods": [ - "describe_conversion_workspace_revisions" - ] - }, - "DescribeDatabaseEntities": { - "methods": [ - "describe_database_entities" - ] - }, - "FetchStaticIps": { - "methods": [ - "fetch_static_ips" - ] - }, - "GenerateSshScript": { - "methods": [ - "generate_ssh_script" - ] - }, - "GetConnectionProfile": { - "methods": [ - "get_connection_profile" - ] - }, - "GetConversionWorkspace": { - "methods": [ - "get_conversion_workspace" - ] - }, - "GetMigrationJob": { - "methods": [ - "get_migration_job" - ] - }, - "GetPrivateConnection": { - "methods": [ - "get_private_connection" - ] - }, - "ImportMappingRules": { - "methods": [ - "import_mapping_rules" - ] - }, - "ListConnectionProfiles": { - "methods": [ - "list_connection_profiles" - ] - }, - "ListConversionWorkspaces": { - "methods": [ - "list_conversion_workspaces" - ] - }, - "ListMigrationJobs": { - "methods": [ - "list_migration_jobs" - ] - }, - "ListPrivateConnections": { - "methods": [ - "list_private_connections" - ] - }, - "PromoteMigrationJob": { - "methods": [ - "promote_migration_job" - ] - }, - "RestartMigrationJob": { - "methods": [ - "restart_migration_job" - ] - }, - "ResumeMigrationJob": { - "methods": [ - "resume_migration_job" - ] - }, - "RollbackConversionWorkspace": { - "methods": [ - "rollback_conversion_workspace" - ] - }, - "SearchBackgroundJobs": { - "methods": [ - "search_background_jobs" - ] - }, - "SeedConversionWorkspace": { - "methods": [ - "seed_conversion_workspace" - ] - }, - "StartMigrationJob": { - "methods": [ - 
"start_migration_job" - ] - }, - "StopMigrationJob": { - "methods": [ - "stop_migration_job" - ] - }, - "UpdateConnectionProfile": { - "methods": [ - "update_connection_profile" - ] - }, - "UpdateConversionWorkspace": { - "methods": [ - "update_conversion_workspace" - ] - }, - "UpdateMigrationJob": { - "methods": [ - "update_migration_job" - ] - }, - "VerifyMigrationJob": { - "methods": [ - "verify_migration_job" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataMigrationServiceAsyncClient", - "rpcs": { - "ApplyConversionWorkspace": { - "methods": [ - "apply_conversion_workspace" - ] - }, - "CommitConversionWorkspace": { - "methods": [ - "commit_conversion_workspace" - ] - }, - "ConvertConversionWorkspace": { - "methods": [ - "convert_conversion_workspace" - ] - }, - "CreateConnectionProfile": { - "methods": [ - "create_connection_profile" - ] - }, - "CreateConversionWorkspace": { - "methods": [ - "create_conversion_workspace" - ] - }, - "CreateMigrationJob": { - "methods": [ - "create_migration_job" - ] - }, - "CreatePrivateConnection": { - "methods": [ - "create_private_connection" - ] - }, - "DeleteConnectionProfile": { - "methods": [ - "delete_connection_profile" - ] - }, - "DeleteConversionWorkspace": { - "methods": [ - "delete_conversion_workspace" - ] - }, - "DeleteMigrationJob": { - "methods": [ - "delete_migration_job" - ] - }, - "DeletePrivateConnection": { - "methods": [ - "delete_private_connection" - ] - }, - "DescribeConversionWorkspaceRevisions": { - "methods": [ - "describe_conversion_workspace_revisions" - ] - }, - "DescribeDatabaseEntities": { - "methods": [ - "describe_database_entities" - ] - }, - "FetchStaticIps": { - "methods": [ - "fetch_static_ips" - ] - }, - "GenerateSshScript": { - "methods": [ - "generate_ssh_script" - ] - }, - "GetConnectionProfile": { - "methods": [ - "get_connection_profile" - ] - }, - "GetConversionWorkspace": { - "methods": [ - "get_conversion_workspace" - ] - }, - "GetMigrationJob": { - "methods": [ - 
"get_migration_job" - ] - }, - "GetPrivateConnection": { - "methods": [ - "get_private_connection" - ] - }, - "ImportMappingRules": { - "methods": [ - "import_mapping_rules" - ] - }, - "ListConnectionProfiles": { - "methods": [ - "list_connection_profiles" - ] - }, - "ListConversionWorkspaces": { - "methods": [ - "list_conversion_workspaces" - ] - }, - "ListMigrationJobs": { - "methods": [ - "list_migration_jobs" - ] - }, - "ListPrivateConnections": { - "methods": [ - "list_private_connections" - ] - }, - "PromoteMigrationJob": { - "methods": [ - "promote_migration_job" - ] - }, - "RestartMigrationJob": { - "methods": [ - "restart_migration_job" - ] - }, - "ResumeMigrationJob": { - "methods": [ - "resume_migration_job" - ] - }, - "RollbackConversionWorkspace": { - "methods": [ - "rollback_conversion_workspace" - ] - }, - "SearchBackgroundJobs": { - "methods": [ - "search_background_jobs" - ] - }, - "SeedConversionWorkspace": { - "methods": [ - "seed_conversion_workspace" - ] - }, - "StartMigrationJob": { - "methods": [ - "start_migration_job" - ] - }, - "StopMigrationJob": { - "methods": [ - "stop_migration_job" - ] - }, - "UpdateConnectionProfile": { - "methods": [ - "update_connection_profile" - ] - }, - "UpdateConversionWorkspace": { - "methods": [ - "update_conversion_workspace" - ] - }, - "UpdateMigrationJob": { - "methods": [ - "update_migration_job" - ] - }, - "VerifyMigrationJob": { - "methods": [ - "verify_migration_job" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed b/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed deleted file mode 100644 index d368a62..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dms package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py deleted file mode 100644 index 253bb20..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataMigrationServiceClient -from .async_client import DataMigrationServiceAsyncClient - -__all__ = ( - 'DataMigrationServiceClient', - 'DataMigrationServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py deleted file mode 100644 index 31a1e73..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/async_client.py +++ /dev/null @@ -1,4804 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.clouddms_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # 
type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport -from .client import DataMigrationServiceClient - - -class DataMigrationServiceAsyncClient: - """Database Migration service""" - - _client: DataMigrationServiceClient - - DEFAULT_ENDPOINT = DataMigrationServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataMigrationServiceClient.DEFAULT_MTLS_ENDPOINT - - connection_profile_path = staticmethod(DataMigrationServiceClient.connection_profile_path) - parse_connection_profile_path = staticmethod(DataMigrationServiceClient.parse_connection_profile_path) - conversion_workspace_path = staticmethod(DataMigrationServiceClient.conversion_workspace_path) - parse_conversion_workspace_path = staticmethod(DataMigrationServiceClient.parse_conversion_workspace_path) - migration_job_path = staticmethod(DataMigrationServiceClient.migration_job_path) - parse_migration_job_path = staticmethod(DataMigrationServiceClient.parse_migration_job_path) - networks_path = staticmethod(DataMigrationServiceClient.networks_path) - parse_networks_path = staticmethod(DataMigrationServiceClient.parse_networks_path) - private_connection_path = staticmethod(DataMigrationServiceClient.private_connection_path) - parse_private_connection_path = staticmethod(DataMigrationServiceClient.parse_private_connection_path) - common_billing_account_path = staticmethod(DataMigrationServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataMigrationServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataMigrationServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataMigrationServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataMigrationServiceClient.common_organization_path) - parse_common_organization_path = 
staticmethod(DataMigrationServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataMigrationServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataMigrationServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataMigrationServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataMigrationServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataMigrationServiceAsyncClient: The constructed client. - """ - return DataMigrationServiceClient.from_service_account_info.__func__(DataMigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataMigrationServiceAsyncClient: The constructed client. - """ - return DataMigrationServiceClient.from_service_account_file.__func__(DataMigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataMigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataMigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataMigrationServiceTransport: The transport used by the client instance. 
- """ - return self._client.transport - - get_transport_class = functools.partial(type(DataMigrationServiceClient).get_transport_class, type(DataMigrationServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataMigrationServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data migration service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DataMigrationServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DataMigrationServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_migration_jobs(self, - request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMigrationJobsAsyncPager: - r"""Lists migration jobs in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_list_migration_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListMigrationJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]]): - The request object. Retrieves a list of all migration - jobs in a given project and location. - parent (:class:`str`): - Required. The parent which owns this - collection of migrationJobs. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager: - Response message for - 'ListMigrationJobs' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.ListMigrationJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_migration_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMigrationJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_migration_job(self, - request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.MigrationJob: - r"""Gets details of a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_get_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetMigrationJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]]): - The request object. Request message for 'GetMigrationJob' - request. - name (:class:`str`): - Required. Name of the migration job - resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.MigrationJob: - Represents a Database Migration - Service migration job object. 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.GetMigrationJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_migration_job(self, - request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_job: Optional[clouddms_resources.MigrationJob] = None, - migration_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new migration job in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_create_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.CreateMigrationJobRequest( - parent="parent_value", - migration_job_id="migration_job_id_value", - migration_job=migration_job, - ) - - # Make the request - operation = client.create_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]]): - The request object. Request message to create a new - Database Migration Service migration job - in the specified project and region. - parent (:class:`str`): - Required. The parent which owns this - collection of migration jobs. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): - Required. Represents a `migration - job `__ - object. - - This corresponds to the ``migration_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_job_id (:class:`str`): - Required. The ID of the instance to - create. - - This corresponds to the ``migration_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_job, migration_job_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.CreateMigrationJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_job is not None: - request.migration_job = migration_job - if migration_job_id is not None: - request.migration_job_id = migration_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_migration_job(self, - request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, - *, - migration_job: Optional[clouddms_resources.MigrationJob] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_update_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.UpdateMigrationJobRequest( - migration_job=migration_job, - ) - - # Make the request - operation = client.update_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request 
(Optional[Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]]): - The request object. Request message for - 'UpdateMigrationJob' request. - migration_job (:class:`google.cloud.clouddms_v1.types.MigrationJob`): - Required. The migration job - parameters to update. - - This corresponds to the ``migration_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([migration_job, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.UpdateMigrationJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if migration_job is not None: - request.migration_job = migration_job - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("migration_job.name", request.migration_job.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_migration_job(self, - request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_delete_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteMigrationJobRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]]): - The request object. Request message for - 'DeleteMigrationJob' request. - name (:class:`str`): - Required. Name of the migration job - resource to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.DeleteMigrationJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def start_migration_job(self, - request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Start an already created migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_start_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.StartMigrationJobRequest( - ) - - # Make the request - operation = client.start_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]]): - The request object. Request message for - 'StartMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.StartMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.start_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def stop_migration_job(self, - request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Stops a running migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_stop_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.StopMigrationJobRequest( - ) - - # Make the request - operation = client.stop_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]]): - The request object. Request message for - 'StopMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.StopMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.stop_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def resume_migration_job(self, - request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Resume a migration job that is currently stopped and - is resumable (was stopped during CDC phase). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_resume_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ResumeMigrationJobRequest( - ) - - # Make the request - operation = client.resume_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]]): - The request object. Request message for - 'ResumeMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.ResumeMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resume_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def promote_migration_job(self, - request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Promote a migration job, stopping replication to the - destination and promoting the destination to be a - standalone database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_promote_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.PromoteMigrationJobRequest( - ) - - # Make the request - operation = client.promote_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]]): - The request object. 
Request message for - 'PromoteMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.PromoteMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.promote_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def verify_migration_job(self, - request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Verify a migration job, making sure the destination - can reach the source and that all configuration and - prerequisites are met. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_verify_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.VerifyMigrationJobRequest( - ) - - # Make the request - operation = client.verify_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]]): - The request object. Request message for - 'VerifyMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.VerifyMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.verify_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def restart_migration_job(self, - request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Restart a stopped or failed migration job, resetting - the destination instance to its original state and - starting the migration process from scratch. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_restart_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.RestartMigrationJobRequest( - ) - - # Make the request - operation = client.restart_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]]): - The request object. Request message for - 'RestartMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - request = clouddms.RestartMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.restart_migration_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def generate_ssh_script(self, - request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.SshScript: - r"""Generate a SSH configuration script to configure the - reverse SSH connectivity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_generate_ssh_script(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - vm_creation_config = clouddms_v1.VmCreationConfig() - vm_creation_config.vm_machine_type = "vm_machine_type_value" - - request = clouddms_v1.GenerateSshScriptRequest( - vm_creation_config=vm_creation_config, - vm="vm_value", - ) - - # Make the request - response = await client.generate_ssh_script(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]]): - The request object. Request message for - 'GenerateSshScript' request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.SshScript: - Response message for - 'GenerateSshScript' request. - - """ - # Create or coerce a protobuf request object. - request = clouddms.GenerateSshScriptRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_ssh_script, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("migration_job", request.migration_job), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_connection_profiles(self, - request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConnectionProfilesAsyncPager: - r"""Retrieves a list of all connection profiles in a - given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_list_connection_profiles(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConnectionProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connection_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]]): - The request object. Request message for - 'ListConnectionProfiles' request. - parent (:class:`str`): - Required. The parent which owns this - collection of connection profiles. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager: - Response message for - 'ListConnectionProfiles' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.ListConnectionProfilesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_connection_profiles, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConnectionProfilesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_connection_profile(self, - request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.ConnectionProfile: - r"""Gets details of a single connection profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_get_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConnectionProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]]): - The request object. Request message for - 'GetConnectionProfile' request. - name (:class:`str`): - Required. Name of the connection - profile resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.ConnectionProfile: - A connection profile definition. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.GetConnectionProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_connection_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_connection_profile(self, - request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, - connection_profile_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new connection profile in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_create_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.CreateConnectionProfileRequest( - parent="parent_value", - connection_profile_id="connection_profile_id_value", - connection_profile=connection_profile, - ) - - # Make the request - operation = client.create_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]]): - The request object. Request message for - 'CreateConnectionProfile' request. - parent (:class:`str`): - Required. The parent which owns this - collection of connection profiles. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`): - Required. The create request body - including the connection profile data - - This corresponds to the ``connection_profile`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_profile_id (:class:`str`): - Required. The connection profile - identifier. - - This corresponds to the ``connection_profile_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConnectionProfile` - A connection profile definition. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection_profile, connection_profile_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.CreateConnectionProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection_profile is not None: - request.connection_profile = connection_profile - if connection_profile_id is not None: - request.connection_profile_id = connection_profile_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_connection_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.ConnectionProfile, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_connection_profile(self, - request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None, - *, - connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Update the configuration of a single connection - profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_update_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.UpdateConnectionProfileRequest( - connection_profile=connection_profile, - ) - - # Make the request - operation = client.update_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]]): - The request object. Request message for - 'UpdateConnectionProfile' request. - connection_profile (:class:`google.cloud.clouddms_v1.types.ConnectionProfile`): - Required. The connection profile - parameters to update. - - This corresponds to the ``connection_profile`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConnectionProfile` - A connection profile definition. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([connection_profile, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.UpdateConnectionProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if connection_profile is not None: - request.connection_profile = connection_profile - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_connection_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection_profile.name", request.connection_profile.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.ConnectionProfile, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def delete_connection_profile(self, - request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single Database Migration Service - connection profile. A connection profile can only be - deleted if it is not in use by any active migration - jobs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_delete_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConnectionProfileRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]]): - The request object. Request message for - 'DeleteConnectionProfile' request. - name (:class:`str`): - Required. Name of the connection - profile resource to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.DeleteConnectionProfileRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_connection_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def create_private_connection(self, - request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - private_connection: Optional[clouddms_resources.PrivateConnection] = None, - private_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new private connection in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_create_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - private_connection = clouddms_v1.PrivateConnection() - private_connection.vpc_peering_config.vpc_name = "vpc_name_value" - private_connection.vpc_peering_config.subnet = "subnet_value" - - request = clouddms_v1.CreatePrivateConnectionRequest( - parent="parent_value", - private_connection_id="private_connection_id_value", - private_connection=private_connection, - ) - - # Make the request - operation = client.create_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]]): - The request object. Request message to create a new - private connection in the specified - project and region. - parent (:class:`str`): - Required. The parent that owns the - collection of PrivateConnections. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - private_connection (:class:`google.cloud.clouddms_v1.types.PrivateConnection`): - Required. The private connection - resource to create. - - This corresponds to the ``private_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - private_connection_id (:class:`str`): - Required. The private connection - identifier. - - This corresponds to the ``private_connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity - with the customer's network. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, private_connection, private_connection_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.CreatePrivateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if private_connection is not None: - request.private_connection = private_connection - if private_connection_id is not None: - request.private_connection_id = private_connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_private_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clouddms_resources.PrivateConnection, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_private_connection(self, - request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.PrivateConnection: - r"""Gets details of a single private connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_get_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetPrivateConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_private_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]]): - The request object. Request message to get a private - connection resource. - name (:class:`str`): - Required. The name of the private - connection to get. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.PrivateConnection: - The PrivateConnection resource is - used to establish private connectivity - with the customer's network. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.GetPrivateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_private_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_private_connections(self, - request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionsAsyncPager: - r"""Retrieves a list of private connections in a given - project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_list_private_connections(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListPrivateConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_private_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]]): - The request object. Request message to retrieve a list of - private connections in a given project - and location. - parent (:class:`str`): - Required. The parent that owns the - collection of private connections. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager: - Response message for - 'ListPrivateConnections' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.ListPrivateConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_private_connections, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPrivateConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_private_connection(self, - request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single Database Migration Service private - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_delete_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeletePrivateConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]]): - The request object. Request message to delete a private - connection. - name (:class:`str`): - Required. The name of the private - connection to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.DeletePrivateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_private_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_conversion_workspace(self, - request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> conversionworkspace_resources.ConversionWorkspace: - r"""Gets details of a single conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_get_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_conversion_workspace(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'GetConversionWorkspace' request. - name (:class:`str`): - Required. Name of the conversion - workspace resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.ConversionWorkspace: - The main conversion workspace - resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.GetConversionWorkspaceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_conversion_workspaces(self, - request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConversionWorkspacesAsyncPager: - r"""Lists conversion workspaces in a given project and - location. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_list_conversion_workspaces(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConversionWorkspacesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_conversion_workspaces(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]]): - The request object. Retrieve a list of all conversion - workspaces in a given project and - location. - parent (:class:`str`): - Required. The parent which owns this - collection of conversion workspaces. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager: - Response message for - 'ListConversionWorkspaces' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.ListConversionWorkspacesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_conversion_workspaces, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConversionWorkspacesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_conversion_workspace(self, - request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, - *, - parent: Optional[str] = None, - conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, - conversion_workspace_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new conversion workspace in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_create_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - conversion_workspace = clouddms_v1.ConversionWorkspace() - conversion_workspace.source.engine = "ORACLE" - conversion_workspace.source.version = "version_value" - conversion_workspace.destination.engine = "ORACLE" - conversion_workspace.destination.version = "version_value" - - request = clouddms_v1.CreateConversionWorkspaceRequest( - parent="parent_value", - conversion_workspace_id="conversion_workspace_id_value", - conversion_workspace=conversion_workspace, - ) - - # Make the request - operation = client.create_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request 
(Optional[Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]]): - The request object. Request message to create a new - Conversion Workspace in the specified - project and region. - parent (:class:`str`): - Required. The parent which owns this - collection of conversion workspaces. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): - Required. Represents a conversion - workspace object. - - This corresponds to the ``conversion_workspace`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - conversion_workspace_id (:class:`str`): - Required. The ID of the conversion - workspace to create. - - This corresponds to the ``conversion_workspace_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.CreateConversionWorkspaceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if conversion_workspace is not None: - request.conversion_workspace = conversion_workspace - if conversion_workspace_id is not None: - request.conversion_workspace_id = conversion_workspace_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_conversion_workspace(self, - request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, - *, - conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single conversion - workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_update_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - conversion_workspace = clouddms_v1.ConversionWorkspace() - conversion_workspace.source.engine = "ORACLE" - conversion_workspace.source.version = "version_value" - conversion_workspace.destination.engine = "ORACLE" - conversion_workspace.destination.version = "version_value" - - request = clouddms_v1.UpdateConversionWorkspaceRequest( - conversion_workspace=conversion_workspace, - ) - - # Make the request - operation = client.update_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'UpdateConversionWorkspace' request. 
- conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): - Required. The conversion workspace - parameters to update. - - This corresponds to the ``conversion_workspace`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([conversion_workspace, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.UpdateConversionWorkspaceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if conversion_workspace is not None: - request.conversion_workspace = conversion_workspace - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace.name", request.conversion_workspace.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_conversion_workspace(self, - request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_delete_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'DeleteConversionWorkspace' request. - name (:class:`str`): - Required. Name of the conversion - workspace resource to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.DeleteConversionWorkspaceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def seed_conversion_workspace(self, - request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports a snapshot of the source database into the - conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_seed_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.SeedConversionWorkspaceRequest( - source_connection_profile="source_connection_profile_value", - ) - - # Make the request - operation = client.seed_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'SeedConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.SeedConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.seed_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def import_mapping_rules(self, - request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports the mapping rules for a given conversion - workspace. Supports various formats of external rules - files. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_import_mapping_rules(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ImportMappingRulesRequest( - parent="parent_value", - ) - - # Make the request - operation = client.import_mapping_rules(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]]): - The request object. Request message for - 'ImportMappingRules' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.ImportMappingRulesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_mapping_rules, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def convert_conversion_workspace(self, - request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a draft tree schema for the destination - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_convert_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ConvertConversionWorkspaceRequest( - ) - - # Make the request - operation = client.convert_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'ConvertConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.ConvertConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.convert_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def commit_conversion_workspace(self, - request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Marks all the data in the conversion workspace as - committed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_commit_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.CommitConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.commit_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'CommitConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.CommitConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def rollback_conversion_workspace(self, - request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Rolls back a conversion workspace to the last - committed snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_rollback_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.RollbackConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.rollback_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'RollbackConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.RollbackConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def apply_conversion_workspace(self, - request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Applies draft tree onto a specific destination - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_apply_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ApplyConversionWorkspaceRequest( - connection_profile="connection_profile_value", - name="name_value", - ) - - # Make the request - operation = client.apply_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]]): - The request object. Request message for - 'ApplyConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - request = clouddms.ApplyConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.apply_conversion_workspace, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - async def describe_database_entities(self, - request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.DescribeDatabaseEntitiesAsyncPager: - r"""Describes the database entities tree for a specific - conversion workspace and a specific tree type. - - Database entities are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are simple data - objects describing the structure of the client database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_describe_database_entities(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DescribeDatabaseEntitiesRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - page_result = client.describe_database_entities(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]]): - The request object. Request message for - 'DescribeDatabaseEntities' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager: - Response message for - 'DescribeDatabaseEntities' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = clouddms.DescribeDatabaseEntitiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.describe_database_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.DescribeDatabaseEntitiesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def search_background_jobs(self, - request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.SearchBackgroundJobsResponse: - r"""Searches/lists the background jobs for a specific - conversion workspace. - - The background jobs are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are a way to expose - the data plane jobs log. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_search_background_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.SearchBackgroundJobsRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - response = await client.search_background_jobs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]]): - The request object. Request message for - 'SearchBackgroundJobs' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: - Response message for - 'SearchBackgroundJobs' request. - - """ - # Create or coerce a protobuf request object. - request = clouddms.SearchBackgroundJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_background_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def describe_conversion_workspace_revisions(self, - request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: - r"""Retrieves a list of committed revisions of a specific - conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_describe_conversion_workspace_revisions(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - response = await client.describe_conversion_workspace_revisions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]]): - The request object. Request message for - 'DescribeConversionWorkspaceRevisions' - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: - Response message for - 'DescribeConversionWorkspaceRevisions' - request. - - """ - # Create or coerce a protobuf request object. - request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.describe_conversion_workspace_revisions, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_static_ips(self, - request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchStaticIpsAsyncPager: - r"""Fetches a set of static IP addresses that need to be - allowlisted by the customer when using the static-IP - connectivity method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - async def sample_fetch_static_ips(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.FetchStaticIpsRequest( - name="name_value", - ) - - # Make the request - page_result = client.fetch_static_ips(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]]): - The request object. Request message for 'FetchStaticIps' - request. - name (:class:`str`): - Required. The resource name for the location for which - static IPs should be returned. Must be in the format - ``projects/*/locations/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager: - Response message for a - 'FetchStaticIps' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clouddms.FetchStaticIpsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_static_ips, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.FetchStaticIpsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataMigrationServiceAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py deleted file mode 100644 index 6cae9f9..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/client.py +++ /dev/null @@ -1,5053 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.clouddms_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataMigrationServiceGrpcTransport 
-from .transports.grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport - - -class DataMigrationServiceClientMeta(type): - """Metaclass for the DataMigrationService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataMigrationServiceTransport]] - _transport_registry["grpc"] = DataMigrationServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataMigrationServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataMigrationServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DataMigrationServiceClient(metaclass=DataMigrationServiceClientMeta): - """Database Migration service""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "datamigration.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataMigrationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataMigrationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataMigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataMigrationServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def connection_profile_path(project: str,location: str,connection_profile: str,) -> str: - """Returns a fully-qualified connection_profile string.""" - return "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, ) - - @staticmethod - def parse_connection_profile_path(path: str) -> Dict[str,str]: - """Parses a connection_profile path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connectionProfiles/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def conversion_workspace_path(project: str,location: str,conversion_workspace: str,) -> str: - """Returns a fully-qualified conversion_workspace string.""" - return "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, ) - - @staticmethod - def parse_conversion_workspace_path(path: str) -> Dict[str,str]: - """Parses a conversion_workspace path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/conversionWorkspaces/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def migration_job_path(project: str,location: str,migration_job: str,) -> str: - """Returns a fully-qualified migration_job string.""" - return "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, ) - - @staticmethod - def parse_migration_job_path(path: str) -> Dict[str,str]: - """Parses a migration_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/migrationJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def networks_path(project: str,network: str,) -> str: - """Returns a fully-qualified networks string.""" - 
return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - - @staticmethod - def parse_networks_path(path: str) -> Dict[str,str]: - """Parses a networks path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def private_connection_path(project: str,location: str,private_connection: str,) -> str: - """Returns a fully-qualified private_connection string.""" - return "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, ) - - @staticmethod - def parse_private_connection_path(path: str) -> Dict[str,str]: - """Parses a private_connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/privateConnections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return 
"organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataMigrationServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data migration service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DataMigrationServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DataMigrationServiceTransport): - # transport is a DataMigrationServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def list_migration_jobs(self, - request: Optional[Union[clouddms.ListMigrationJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListMigrationJobsPager: - r"""Lists migration jobs in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_list_migration_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListMigrationJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]): - The request object. Retrieves a list of all migration - jobs in a given project and location. - parent (str): - Required. The parent which owns this - collection of migrationJobs. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager: - Response message for - 'ListMigrationJobs' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ListMigrationJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ListMigrationJobsRequest): - request = clouddms.ListMigrationJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_migration_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMigrationJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_migration_job(self, - request: Optional[Union[clouddms.GetMigrationJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.MigrationJob: - r"""Gets details of a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_get_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetMigrationJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.GetMigrationJobRequest, dict]): - The request object. Request message for 'GetMigrationJob' - request. - name (str): - Required. Name of the migration job - resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.MigrationJob: - Represents a Database Migration - Service migration job object. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.GetMigrationJobRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.GetMigrationJobRequest): - request = clouddms.GetMigrationJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_migration_job(self, - request: Optional[Union[clouddms.CreateMigrationJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_job: Optional[clouddms_resources.MigrationJob] = None, - migration_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new migration job in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_create_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.CreateMigrationJobRequest( - parent="parent_value", - migration_job_id="migration_job_id_value", - migration_job=migration_job, - ) - - # Make the request - operation = client.create_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.CreateMigrationJobRequest, dict]): - The request object. Request message to create a new - Database Migration Service migration job - in the specified project and region. - parent (str): - Required. The parent which owns this - collection of migration jobs. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_job (google.cloud.clouddms_v1.types.MigrationJob): - Required. Represents a `migration - job `__ - object. - - This corresponds to the ``migration_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_job_id (str): - Required. The ID of the instance to - create. - - This corresponds to the ``migration_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_job, migration_job_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.CreateMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.CreateMigrationJobRequest): - request = clouddms.CreateMigrationJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_job is not None: - request.migration_job = migration_job - if migration_job_id is not None: - request.migration_job_id = migration_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_migration_job(self, - request: Optional[Union[clouddms.UpdateMigrationJobRequest, dict]] = None, - *, - migration_job: Optional[clouddms_resources.MigrationJob] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_update_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.UpdateMigrationJobRequest( - migration_job=migration_job, - ) - - # Make the request - operation = client.update_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.UpdateMigrationJobRequest, dict]): - The request object. Request message for - 'UpdateMigrationJob' request. - migration_job (google.cloud.clouddms_v1.types.MigrationJob): - Required. The migration job - parameters to update. - - This corresponds to the ``migration_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([migration_job, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.UpdateMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.UpdateMigrationJobRequest): - request = clouddms.UpdateMigrationJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if migration_job is not None: - request.migration_job = migration_job - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("migration_job.name", request.migration_job.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_migration_job(self, - request: Optional[Union[clouddms.DeleteMigrationJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_delete_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteMigrationJobRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DeleteMigrationJobRequest, dict]): - The request object. Request message for - 'DeleteMigrationJob' request. - name (str): - Required. Name of the migration job - resource to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DeleteMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DeleteMigrationJobRequest): - request = clouddms.DeleteMigrationJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def start_migration_job(self, - request: Optional[Union[clouddms.StartMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Start an already created migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_start_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.StartMigrationJobRequest( - ) - - # Make the request - operation = client.start_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.StartMigrationJobRequest, dict]): - The request object. Request message for - 'StartMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.StartMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.StartMigrationJobRequest): - request = clouddms.StartMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.start_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def stop_migration_job(self, - request: Optional[Union[clouddms.StopMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Stops a running migration job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_stop_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.StopMigrationJobRequest( - ) - - # Make the request - operation = client.stop_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.StopMigrationJobRequest, dict]): - The request object. Request message for - 'StopMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.StopMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.StopMigrationJobRequest): - request = clouddms.StopMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.stop_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def resume_migration_job(self, - request: Optional[Union[clouddms.ResumeMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Resume a migration job that is currently stopped and - is resumable (was stopped during CDC phase). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_resume_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ResumeMigrationJobRequest( - ) - - # Make the request - operation = client.resume_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ResumeMigrationJobRequest, dict]): - The request object. Request message for - 'ResumeMigrationJob' request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ResumeMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ResumeMigrationJobRequest): - request = clouddms.ResumeMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resume_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def promote_migration_job(self, - request: Optional[Union[clouddms.PromoteMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Promote a migration job, stopping replication to the - destination and promoting the destination to be a - standalone database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_promote_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.PromoteMigrationJobRequest( - ) - - # Make the request - operation = client.promote_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.PromoteMigrationJobRequest, dict]): - The request object. Request message for - 'PromoteMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.PromoteMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.PromoteMigrationJobRequest): - request = clouddms.PromoteMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.promote_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def verify_migration_job(self, - request: Optional[Union[clouddms.VerifyMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Verify a migration job, making sure the destination - can reach the source and that all configuration and - prerequisites are met. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_verify_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.VerifyMigrationJobRequest( - ) - - # Make the request - operation = client.verify_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.VerifyMigrationJobRequest, dict]): - The request object. Request message for - 'VerifyMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.VerifyMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.VerifyMigrationJobRequest): - request = clouddms.VerifyMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.verify_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def restart_migration_job(self, - request: Optional[Union[clouddms.RestartMigrationJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Restart a stopped or failed migration job, resetting - the destination instance to its original state and - starting the migration process from scratch. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_restart_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.RestartMigrationJobRequest( - ) - - # Make the request - operation = client.restart_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.RestartMigrationJobRequest, dict]): - The request object. Request message for - 'RestartMigrationJob' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.MigrationJob` - Represents a Database Migration Service migration job - object. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.RestartMigrationJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.RestartMigrationJobRequest): - request = clouddms.RestartMigrationJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restart_migration_job] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.MigrationJob, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def generate_ssh_script(self, - request: Optional[Union[clouddms.GenerateSshScriptRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.SshScript: - r"""Generate a SSH configuration script to configure the - reverse SSH connectivity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_generate_ssh_script(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - vm_creation_config = clouddms_v1.VmCreationConfig() - vm_creation_config.vm_machine_type = "vm_machine_type_value" - - request = clouddms_v1.GenerateSshScriptRequest( - vm_creation_config=vm_creation_config, - vm="vm_value", - ) - - # Make the request - response = client.generate_ssh_script(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.GenerateSshScriptRequest, dict]): - The request object. 
Request message for - 'GenerateSshScript' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.SshScript: - Response message for - 'GenerateSshScript' request. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.GenerateSshScriptRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.GenerateSshScriptRequest): - request = clouddms.GenerateSshScriptRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.generate_ssh_script] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("migration_job", request.migration_job), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_connection_profiles(self, - request: Optional[Union[clouddms.ListConnectionProfilesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConnectionProfilesPager: - r"""Retrieves a list of all connection profiles in a - given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_list_connection_profiles(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConnectionProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connection_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ListConnectionProfilesRequest, dict]): - The request object. Request message for - 'ListConnectionProfiles' request. - parent (str): - Required. The parent which owns this - collection of connection profiles. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager: - Response message for - 'ListConnectionProfiles' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ListConnectionProfilesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ListConnectionProfilesRequest): - request = clouddms.ListConnectionProfilesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_connection_profiles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListConnectionProfilesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_connection_profile(self, - request: Optional[Union[clouddms.GetConnectionProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.ConnectionProfile: - r"""Gets details of a single connection profile. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_get_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConnectionProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection_profile(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.GetConnectionProfileRequest, dict]): - The request object. Request message for - 'GetConnectionProfile' request. - name (str): - Required. Name of the connection - profile resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.ConnectionProfile: - A connection profile definition. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.GetConnectionProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.GetConnectionProfileRequest): - request = clouddms.GetConnectionProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_connection_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_connection_profile(self, - request: Optional[Union[clouddms.CreateConnectionProfileRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, - connection_profile_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new connection profile in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_create_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.CreateConnectionProfileRequest( - parent="parent_value", - connection_profile_id="connection_profile_id_value", - connection_profile=connection_profile, - ) - - # Make the request - operation = client.create_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.CreateConnectionProfileRequest, dict]): - The request object. Request message for - 'CreateConnectionProfile' request. - parent (str): - Required. The parent which owns this - collection of connection profiles. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): - Required. The create request body - including the connection profile data - - This corresponds to the ``connection_profile`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_profile_id (str): - Required. The connection profile - identifier. 
- - This corresponds to the ``connection_profile_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConnectionProfile` - A connection profile definition. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection_profile, connection_profile_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.CreateConnectionProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.CreateConnectionProfileRequest): - request = clouddms.CreateConnectionProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection_profile is not None: - request.connection_profile = connection_profile - if connection_profile_id is not None: - request.connection_profile_id = connection_profile_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_connection_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.ConnectionProfile, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_connection_profile(self, - request: Optional[Union[clouddms.UpdateConnectionProfileRequest, dict]] = None, - *, - connection_profile: Optional[clouddms_resources.ConnectionProfile] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Update the configuration of a single connection - profile. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_update_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.UpdateConnectionProfileRequest( - connection_profile=connection_profile, - ) - - # Make the request - operation = client.update_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest, dict]): - The request object. Request message for - 'UpdateConnectionProfile' request. - connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): - Required. The connection profile - parameters to update. - - This corresponds to the ``connection_profile`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConnectionProfile` - A connection profile definition. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([connection_profile, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.UpdateConnectionProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.UpdateConnectionProfileRequest): - request = clouddms.UpdateConnectionProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if connection_profile is not None: - request.connection_profile = connection_profile - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_connection_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection_profile.name", request.connection_profile.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.ConnectionProfile, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_connection_profile(self, - request: Optional[Union[clouddms.DeleteConnectionProfileRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single Database Migration Service - connection profile. A connection profile can only be - deleted if it is not in use by any active migration - jobs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_delete_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConnectionProfileRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest, dict]): - The request object. Request message for - 'DeleteConnectionProfile' request. - name (str): - Required. Name of the connection - profile resource to delete. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DeleteConnectionProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DeleteConnectionProfileRequest): - request = clouddms.DeleteConnectionProfileRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_connection_profile] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_private_connection(self, - request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - private_connection: Optional[clouddms_resources.PrivateConnection] = None, - private_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new private connection in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_create_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - private_connection = clouddms_v1.PrivateConnection() - private_connection.vpc_peering_config.vpc_name = "vpc_name_value" - private_connection.vpc_peering_config.subnet = "subnet_value" - - request = clouddms_v1.CreatePrivateConnectionRequest( - parent="parent_value", - private_connection_id="private_connection_id_value", - private_connection=private_connection, - ) - - # Make the request - operation = client.create_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]): - The request object. Request message to create a new - private connection in the specified - project and region. - parent (str): - Required. The parent that owns the - collection of PrivateConnections. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - private_connection (google.cloud.clouddms_v1.types.PrivateConnection): - Required. The private connection - resource to create. - - This corresponds to the ``private_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - private_connection_id (str): - Required. The private connection - identifier. - - This corresponds to the ``private_connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity - with the customer's network. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, private_connection, private_connection_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.CreatePrivateConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.CreatePrivateConnectionRequest): - request = clouddms.CreatePrivateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if private_connection is not None: - request.private_connection = private_connection - if private_connection_id is not None: - request.private_connection_id = private_connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_private_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clouddms_resources.PrivateConnection, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_private_connection(self, - request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms_resources.PrivateConnection: - r"""Gets details of a single private connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_get_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetPrivateConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_private_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]): - The request object. Request message to get a private - connection resource. - name (str): - Required. The name of the private - connection to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.PrivateConnection: - The PrivateConnection resource is - used to establish private connectivity - with the customer's network. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.GetPrivateConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.GetPrivateConnectionRequest): - request = clouddms.GetPrivateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_private_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_private_connections(self, - request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionsPager: - r"""Retrieves a list of private connections in a given - project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_list_private_connections(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListPrivateConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_private_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]): - The request object. Request message to retrieve a list of - private connections in a given project - and location. - parent (str): - Required. The parent that owns the - collection of private connections. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager: - Response message for - 'ListPrivateConnections' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ListPrivateConnectionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ListPrivateConnectionsRequest): - request = clouddms.ListPrivateConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_private_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListPrivateConnectionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_private_connection(self, - request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single Database Migration Service private - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_delete_private_connection(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeletePrivateConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]): - The request object. Request message to delete a private - connection. - name (str): - Required. The name of the private - connection to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DeletePrivateConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DeletePrivateConnectionRequest): - request = clouddms.DeletePrivateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_private_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_conversion_workspace(self, - request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> conversionworkspace_resources.ConversionWorkspace: - r"""Gets details of a single conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_get_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - response = client.get_conversion_workspace(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]): - The request object. Request message for - 'GetConversionWorkspace' request. - name (str): - Required. Name of the conversion - workspace resource to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.ConversionWorkspace: - The main conversion workspace - resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.GetConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.GetConversionWorkspaceRequest): - request = clouddms.GetConversionWorkspaceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_conversion_workspaces(self, - request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConversionWorkspacesPager: - r"""Lists conversion workspaces in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_list_conversion_workspaces(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConversionWorkspacesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_conversion_workspaces(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]): - The request object. Retrieve a list of all conversion - workspaces in a given project and - location. - parent (str): - Required. The parent which owns this - collection of conversion workspaces. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager: - Response message for - 'ListConversionWorkspaces' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ListConversionWorkspacesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ListConversionWorkspacesRequest): - request = clouddms.ListConversionWorkspacesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_conversion_workspaces] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListConversionWorkspacesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_conversion_workspace(self, - request: Optional[Union[clouddms.CreateConversionWorkspaceRequest, dict]] = None, - *, - parent: Optional[str] = None, - conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, - conversion_workspace_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new conversion workspace in a given project - and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_create_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - conversion_workspace = clouddms_v1.ConversionWorkspace() - conversion_workspace.source.engine = "ORACLE" - conversion_workspace.source.version = "version_value" - conversion_workspace.destination.engine = "ORACLE" - conversion_workspace.destination.version = "version_value" - - request = clouddms_v1.CreateConversionWorkspaceRequest( - parent="parent_value", - conversion_workspace_id="conversion_workspace_id_value", - conversion_workspace=conversion_workspace, - ) - - # Make the request - operation = client.create_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]): - The request 
object. Request message to create a new - Conversion Workspace in the specified - project and region. - parent (str): - Required. The parent which owns this - collection of conversion workspaces. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): - Required. Represents a conversion - workspace object. - - This corresponds to the ``conversion_workspace`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - conversion_workspace_id (str): - Required. The ID of the conversion - workspace to create. - - This corresponds to the ``conversion_workspace_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, conversion_workspace, conversion_workspace_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.CreateConversionWorkspaceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): - request = clouddms.CreateConversionWorkspaceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if conversion_workspace is not None: - request.conversion_workspace = conversion_workspace - if conversion_workspace_id is not None: - request.conversion_workspace_id = conversion_workspace_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_conversion_workspace(self, - request: Optional[Union[clouddms.UpdateConversionWorkspaceRequest, dict]] = None, - *, - conversion_workspace: Optional[conversionworkspace_resources.ConversionWorkspace] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single conversion - workspace. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_update_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - conversion_workspace = clouddms_v1.ConversionWorkspace() - conversion_workspace.source.engine = "ORACLE" - conversion_workspace.source.version = "version_value" - conversion_workspace.destination.engine = "ORACLE" - conversion_workspace.destination.version = "version_value" - - request = clouddms_v1.UpdateConversionWorkspaceRequest( - conversion_workspace=conversion_workspace, - ) - - # Make the request - operation = client.update_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]): - The request object. Request message for - 'UpdateConversionWorkspace' request. - conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): - Required. The conversion workspace - parameters to update. - - This corresponds to the ``conversion_workspace`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to - specify the fields to be overwritten by - the update in the conversion workspace - resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([conversion_workspace, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.UpdateConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): - request = clouddms.UpdateConversionWorkspaceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if conversion_workspace is not None: - request.conversion_workspace = conversion_workspace - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace.name", request.conversion_workspace.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_conversion_workspace(self, - request: Optional[Union[clouddms.DeleteConversionWorkspaceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_delete_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]): - The request object. Request message for - 'DeleteConversionWorkspace' request. - name (str): - Required. Name of the conversion - workspace resource to delete. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DeleteConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): - request = clouddms.DeleteConversionWorkspaceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def seed_conversion_workspace(self, - request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Imports a snapshot of the source database into the - conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_seed_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.SeedConversionWorkspaceRequest( - source_connection_profile="source_connection_profile_value", - ) - - # Make the request - operation = client.seed_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]): - The request object. Request message for - 'SeedConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.SeedConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): - request = clouddms.SeedConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.seed_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def import_mapping_rules(self, - request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Imports the mapping rules for a given conversion - workspace. Supports various formats of external rules - files. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_import_mapping_rules(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ImportMappingRulesRequest( - parent="parent_value", - ) - - # Make the request - operation = client.import_mapping_rules(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]): - The request object. Request message for - 'ImportMappingRules' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ImportMappingRulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ImportMappingRulesRequest): - request = clouddms.ImportMappingRulesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_mapping_rules] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def convert_conversion_workspace(self, - request: Optional[Union[clouddms.ConvertConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a draft tree schema for the destination - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_convert_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ConvertConversionWorkspaceRequest( - ) - - # Make the request - operation = client.convert_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]): - The request object. Request message for - 'ConvertConversionWorkspace' request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ConvertConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): - request = clouddms.ConvertConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.convert_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def commit_conversion_workspace(self, - request: Optional[Union[clouddms.CommitConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Marks all the data in the conversion workspace as - committed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_commit_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.CommitConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.commit_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]): - The request object. Request message for - 'CommitConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.CommitConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): - request = clouddms.CommitConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def rollback_conversion_workspace(self, - request: Optional[Union[clouddms.RollbackConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Rolls back a conversion workspace to the last - committed snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_rollback_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.RollbackConversionWorkspaceRequest( - name="name_value", - ) - - # Make the request - operation = client.rollback_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]): - The request object. Request message for - 'RollbackConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.RollbackConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): - request = clouddms.RollbackConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.rollback_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def apply_conversion_workspace(self, - request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Applies draft tree onto a specific destination - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_apply_conversion_workspace(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ApplyConversionWorkspaceRequest( - connection_profile="connection_profile_value", - name="name_value", - ) - - # Make the request - operation = client.apply_conversion_workspace(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]): - The request object. Request message for - 'ApplyConversionWorkspace' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` - The main conversion workspace resource entity. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.ApplyConversionWorkspaceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): - request = clouddms.ApplyConversionWorkspaceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.apply_conversion_workspace] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - conversionworkspace_resources.ConversionWorkspace, - metadata_type=clouddms.OperationMetadata, - ) - - # Done; return the response. - return response - - def describe_database_entities(self, - request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.DescribeDatabaseEntitiesPager: - r"""Describes the database entities tree for a specific - conversion workspace and a specific tree type. - - Database entities are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are simple data - objects describing the structure of the client database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_describe_database_entities(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DescribeDatabaseEntitiesRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - page_result = client.describe_database_entities(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]): - The request object. Request message for - 'DescribeDatabaseEntities' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager: - Response message for - 'DescribeDatabaseEntities' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DescribeDatabaseEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): - request = clouddms.DescribeDatabaseEntitiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.describe_database_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.DescribeDatabaseEntitiesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def search_background_jobs(self, - request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.SearchBackgroundJobsResponse: - r"""Searches/lists the background jobs for a specific - conversion workspace. - - The background jobs are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are a way to expose - the data plane jobs log. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_search_background_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.SearchBackgroundJobsRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - response = client.search_background_jobs(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]): - The request object. Request message for - 'SearchBackgroundJobs' request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: - Response message for - 'SearchBackgroundJobs' request. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.SearchBackgroundJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.SearchBackgroundJobsRequest): - request = clouddms.SearchBackgroundJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_background_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def describe_conversion_workspace_revisions(self, - request: Optional[Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: - r"""Retrieves a list of committed revisions of a specific - conversion workspace. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_describe_conversion_workspace_revisions(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( - conversion_workspace="conversion_workspace_value", - ) - - # Make the request - response = client.describe_conversion_workspace_revisions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]): - The request object. Request message for - 'DescribeConversionWorkspaceRevisions' - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: - Response message for - 'DescribeConversionWorkspaceRevisions' - request. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.DescribeConversionWorkspaceRevisionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.DescribeConversionWorkspaceRevisionsRequest): - request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.describe_conversion_workspace_revisions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("conversion_workspace", request.conversion_workspace), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def fetch_static_ips(self, - request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchStaticIpsPager: - r"""Fetches a set of static IP addresses that need to be - allowlisted by the customer when using the static-IP - connectivity method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import clouddms_v1 - - def sample_fetch_static_ips(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.FetchStaticIpsRequest( - name="name_value", - ) - - # Make the request - page_result = client.fetch_static_ips(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]): - The request object. Request message for 'FetchStaticIps' - request. - name (str): - Required. The resource name for the location for which - static IPs should be returned. Must be in the format - ``projects/*/locations/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager: - Response message for a - 'FetchStaticIps' request. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clouddms.FetchStaticIpsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clouddms.FetchStaticIpsRequest): - request = clouddms.FetchStaticIpsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_static_ips] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.FetchStaticIpsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataMigrationServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. 
- - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataMigrationServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py deleted file mode 100644 index a543082..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/pagers.py +++ /dev/null @@ -1,746 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources - - -class ListMigrationJobsPager: - """A pager for iterating through ``list_migration_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``migration_jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMigrationJobs`` requests and continue to iterate - through the ``migration_jobs`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.ListMigrationJobsResponse], - request: clouddms.ListMigrationJobsRequest, - response: clouddms.ListMigrationJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.ListMigrationJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.ListMigrationJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[clouddms_resources.MigrationJob]: - for page in self.pages: - yield from page.migration_jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationJobsAsyncPager: - """A pager for iterating through ``list_migration_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``migration_jobs`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListMigrationJobs`` requests and continue to iterate - through the ``migration_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListMigrationJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.ListMigrationJobsResponse]], - request: clouddms.ListMigrationJobsRequest, - response: clouddms.ListMigrationJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListMigrationJobsRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListMigrationJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = clouddms.ListMigrationJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.ListMigrationJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[clouddms_resources.MigrationJob]: - async def async_generator(): - async for page in self.pages: - for response in page.migration_jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionProfilesPager: - """A pager for iterating through ``list_connection_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connection_profiles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConnectionProfiles`` requests and continue to iterate - through the ``connection_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.ListConnectionProfilesResponse], - request: clouddms.ListConnectionProfilesRequest, - response: clouddms.ListConnectionProfilesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.ListConnectionProfilesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.ListConnectionProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[clouddms_resources.ConnectionProfile]: - for page in self.pages: - yield from page.connection_profiles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionProfilesAsyncPager: - """A pager for iterating through ``list_connection_profiles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connection_profiles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListConnectionProfiles`` requests and continue to iterate - through the ``connection_profiles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListConnectionProfilesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.ListConnectionProfilesResponse]], - request: clouddms.ListConnectionProfilesRequest, - response: clouddms.ListConnectionProfilesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListConnectionProfilesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListConnectionProfilesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.ListConnectionProfilesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.ListConnectionProfilesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[clouddms_resources.ConnectionProfile]: - async def async_generator(): - async for page in self.pages: - for response in page.connection_profiles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPrivateConnectionsPager: - """A pager for iterating through ``list_private_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``private_connections`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListPrivateConnections`` requests and continue to iterate - through the ``private_connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.ListPrivateConnectionsResponse], - request: clouddms.ListPrivateConnectionsRequest, - response: clouddms.ListPrivateConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = clouddms.ListPrivateConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.ListPrivateConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[clouddms_resources.PrivateConnection]: - for page in self.pages: - yield from page.private_connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPrivateConnectionsAsyncPager: - """A pager for iterating through ``list_private_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``private_connections`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListPrivateConnections`` requests and continue to iterate - through the ``private_connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.ListPrivateConnectionsResponse]], - request: clouddms.ListPrivateConnectionsRequest, - response: clouddms.ListPrivateConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.ListPrivateConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.ListPrivateConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[clouddms_resources.PrivateConnection]: - async def async_generator(): - async for page in self.pages: - for response in page.private_connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConversionWorkspacesPager: - """A pager for iterating through ``list_conversion_workspaces`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``conversion_workspaces`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConversionWorkspaces`` requests and continue to iterate - through the ``conversion_workspaces`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.ListConversionWorkspacesResponse], - request: clouddms.ListConversionWorkspacesRequest, - response: clouddms.ListConversionWorkspacesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.ListConversionWorkspacesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.ListConversionWorkspacesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[conversionworkspace_resources.ConversionWorkspace]: - for page in self.pages: - yield from page.conversion_workspaces - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConversionWorkspacesAsyncPager: - """A pager for iterating through ``list_conversion_workspaces`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``conversion_workspaces`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListConversionWorkspaces`` requests and continue to iterate - through the ``conversion_workspaces`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.ListConversionWorkspacesResponse]], - request: clouddms.ListConversionWorkspacesRequest, - response: clouddms.ListConversionWorkspacesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = clouddms.ListConversionWorkspacesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.ListConversionWorkspacesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.ConversionWorkspace]: - async def async_generator(): - async for page in self.pages: - for response in page.conversion_workspaces: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class DescribeDatabaseEntitiesPager: - """A pager for iterating through ``describe_database_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``database_entities`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``DescribeDatabaseEntities`` requests and continue to iterate - through the ``database_entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.DescribeDatabaseEntitiesResponse], - request: clouddms.DescribeDatabaseEntitiesRequest, - response: clouddms.DescribeDatabaseEntitiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.DescribeDatabaseEntitiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.DescribeDatabaseEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[conversionworkspace_resources.DatabaseEntity]: - for page in self.pages: - yield from page.database_entities - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class DescribeDatabaseEntitiesAsyncPager: - """A pager for iterating through ``describe_database_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``database_entities`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``DescribeDatabaseEntities`` requests and continue to iterate - through the ``database_entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.DescribeDatabaseEntitiesResponse]], - request: clouddms.DescribeDatabaseEntitiesRequest, - response: clouddms.DescribeDatabaseEntitiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.DescribeDatabaseEntitiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.DescribeDatabaseEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.DatabaseEntity]: - async def async_generator(): - async for page in self.pages: - for response in page.database_entities: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchStaticIpsPager: - """A pager for iterating through ``fetch_static_ips`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``static_ips`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``FetchStaticIps`` requests and continue to iterate - through the ``static_ips`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clouddms.FetchStaticIpsResponse], - request: clouddms.FetchStaticIpsRequest, - response: clouddms.FetchStaticIpsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): - The initial request object. - response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = clouddms.FetchStaticIpsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clouddms.FetchStaticIpsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.static_ips - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchStaticIpsAsyncPager: - """A pager for iterating through ``fetch_static_ips`` requests. - - This class thinly wraps an initial - :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``static_ips`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``FetchStaticIps`` requests and continue to iterate - through the ``static_ips`` field on the - corresponding responses. - - All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clouddms.FetchStaticIpsResponse]], - request: clouddms.FetchStaticIpsRequest, - response: clouddms.FetchStaticIpsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): - The initial request object. 
- response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clouddms.FetchStaticIpsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clouddms.FetchStaticIpsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.static_ips: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py deleted file mode 100644 index c5bfc5f..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataMigrationServiceTransport -from .grpc import DataMigrationServiceGrpcTransport -from .grpc_asyncio import DataMigrationServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataMigrationServiceTransport]] -_transport_registry['grpc'] = DataMigrationServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataMigrationServiceGrpcAsyncIOTransport - -__all__ = ( - 'DataMigrationServiceTransport', - 'DataMigrationServiceGrpcTransport', - 'DataMigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py deleted file mode 100644 index ddfe3fb..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py +++ /dev/null @@ -1,733 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.clouddms_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataMigrationServiceTransport(abc.ABC): - """Abstract transport class for DataMigrationService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datamigration.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_migration_jobs: gapic_v1.method.wrap_method( - self.list_migration_jobs, - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_job: gapic_v1.method.wrap_method( - self.get_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_migration_job: gapic_v1.method.wrap_method( - self.create_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.update_migration_job: gapic_v1.method.wrap_method( - self.update_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_migration_job: gapic_v1.method.wrap_method( - self.delete_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.start_migration_job: gapic_v1.method.wrap_method( - self.start_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.stop_migration_job: gapic_v1.method.wrap_method( - self.stop_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.resume_migration_job: gapic_v1.method.wrap_method( - self.resume_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.promote_migration_job: gapic_v1.method.wrap_method( - self.promote_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.verify_migration_job: gapic_v1.method.wrap_method( - self.verify_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.restart_migration_job: 
gapic_v1.method.wrap_method( - self.restart_migration_job, - default_timeout=60.0, - client_info=client_info, - ), - self.generate_ssh_script: gapic_v1.method.wrap_method( - self.generate_ssh_script, - default_timeout=60.0, - client_info=client_info, - ), - self.list_connection_profiles: gapic_v1.method.wrap_method( - self.list_connection_profiles, - default_timeout=60.0, - client_info=client_info, - ), - self.get_connection_profile: gapic_v1.method.wrap_method( - self.get_connection_profile, - default_timeout=60.0, - client_info=client_info, - ), - self.create_connection_profile: gapic_v1.method.wrap_method( - self.create_connection_profile, - default_timeout=60.0, - client_info=client_info, - ), - self.update_connection_profile: gapic_v1.method.wrap_method( - self.update_connection_profile, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_connection_profile: gapic_v1.method.wrap_method( - self.delete_connection_profile, - default_timeout=60.0, - client_info=client_info, - ), - self.create_private_connection: gapic_v1.method.wrap_method( - self.create_private_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_private_connection: gapic_v1.method.wrap_method( - self.get_private_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.list_private_connections: gapic_v1.method.wrap_method( - self.list_private_connections, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_private_connection: gapic_v1.method.wrap_method( - self.delete_private_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_conversion_workspace: gapic_v1.method.wrap_method( - self.get_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.list_conversion_workspaces: gapic_v1.method.wrap_method( - self.list_conversion_workspaces, - default_timeout=60.0, - client_info=client_info, - ), - self.create_conversion_workspace: gapic_v1.method.wrap_method( - 
self.create_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.update_conversion_workspace: gapic_v1.method.wrap_method( - self.update_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_conversion_workspace: gapic_v1.method.wrap_method( - self.delete_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.seed_conversion_workspace: gapic_v1.method.wrap_method( - self.seed_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.import_mapping_rules: gapic_v1.method.wrap_method( - self.import_mapping_rules, - default_timeout=60.0, - client_info=client_info, - ), - self.convert_conversion_workspace: gapic_v1.method.wrap_method( - self.convert_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.commit_conversion_workspace: gapic_v1.method.wrap_method( - self.commit_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.rollback_conversion_workspace: gapic_v1.method.wrap_method( - self.rollback_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.apply_conversion_workspace: gapic_v1.method.wrap_method( - self.apply_conversion_workspace, - default_timeout=60.0, - client_info=client_info, - ), - self.describe_database_entities: gapic_v1.method.wrap_method( - self.describe_database_entities, - default_timeout=60.0, - client_info=client_info, - ), - self.search_background_jobs: gapic_v1.method.wrap_method( - self.search_background_jobs, - default_timeout=60.0, - client_info=client_info, - ), - self.describe_conversion_workspace_revisions: gapic_v1.method.wrap_method( - self.describe_conversion_workspace_revisions, - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_static_ips: gapic_v1.method.wrap_method( - self.fetch_static_ips, - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources 
associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_migration_jobs(self) -> Callable[ - [clouddms.ListMigrationJobsRequest], - Union[ - clouddms.ListMigrationJobsResponse, - Awaitable[clouddms.ListMigrationJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_migration_job(self) -> Callable[ - [clouddms.GetMigrationJobRequest], - Union[ - clouddms_resources.MigrationJob, - Awaitable[clouddms_resources.MigrationJob] - ]]: - raise NotImplementedError() - - @property - def create_migration_job(self) -> Callable[ - [clouddms.CreateMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_migration_job(self) -> Callable[ - [clouddms.UpdateMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_migration_job(self) -> Callable[ - [clouddms.DeleteMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def start_migration_job(self) -> Callable[ - [clouddms.StartMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def stop_migration_job(self) -> Callable[ - [clouddms.StopMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def resume_migration_job(self) -> Callable[ - [clouddms.ResumeMigrationJobRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def promote_migration_job(self) -> Callable[ - [clouddms.PromoteMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def verify_migration_job(self) -> Callable[ - [clouddms.VerifyMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def restart_migration_job(self) -> Callable[ - [clouddms.RestartMigrationJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def generate_ssh_script(self) -> Callable[ - [clouddms.GenerateSshScriptRequest], - Union[ - clouddms.SshScript, - Awaitable[clouddms.SshScript] - ]]: - raise NotImplementedError() - - @property - def list_connection_profiles(self) -> Callable[ - [clouddms.ListConnectionProfilesRequest], - Union[ - clouddms.ListConnectionProfilesResponse, - Awaitable[clouddms.ListConnectionProfilesResponse] - ]]: - raise NotImplementedError() - - @property - def get_connection_profile(self) -> Callable[ - [clouddms.GetConnectionProfileRequest], - Union[ - clouddms_resources.ConnectionProfile, - Awaitable[clouddms_resources.ConnectionProfile] - ]]: - raise NotImplementedError() - - @property - def create_connection_profile(self) -> Callable[ - [clouddms.CreateConnectionProfileRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_connection_profile(self) -> Callable[ - [clouddms.UpdateConnectionProfileRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_connection_profile(self) -> Callable[ - [clouddms.DeleteConnectionProfileRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_private_connection(self) -> Callable[ - [clouddms.CreatePrivateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_private_connection(self) -> Callable[ - [clouddms.GetPrivateConnectionRequest], - Union[ - clouddms_resources.PrivateConnection, - Awaitable[clouddms_resources.PrivateConnection] - ]]: - raise NotImplementedError() - - @property - def list_private_connections(self) -> Callable[ - [clouddms.ListPrivateConnectionsRequest], - Union[ - clouddms.ListPrivateConnectionsResponse, - Awaitable[clouddms.ListPrivateConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_private_connection(self) -> Callable[ - [clouddms.DeletePrivateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_conversion_workspace(self) -> Callable[ - [clouddms.GetConversionWorkspaceRequest], - Union[ - conversionworkspace_resources.ConversionWorkspace, - Awaitable[conversionworkspace_resources.ConversionWorkspace] - ]]: - raise NotImplementedError() - - @property - def list_conversion_workspaces(self) -> Callable[ - [clouddms.ListConversionWorkspacesRequest], - Union[ - clouddms.ListConversionWorkspacesResponse, - Awaitable[clouddms.ListConversionWorkspacesResponse] - ]]: - raise NotImplementedError() - - @property - def create_conversion_workspace(self) -> Callable[ - [clouddms.CreateConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_conversion_workspace(self) -> Callable[ - [clouddms.UpdateConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def 
delete_conversion_workspace(self) -> Callable[ - [clouddms.DeleteConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def seed_conversion_workspace(self) -> Callable[ - [clouddms.SeedConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def import_mapping_rules(self) -> Callable[ - [clouddms.ImportMappingRulesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def convert_conversion_workspace(self) -> Callable[ - [clouddms.ConvertConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def commit_conversion_workspace(self) -> Callable[ - [clouddms.CommitConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def rollback_conversion_workspace(self) -> Callable[ - [clouddms.RollbackConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def apply_conversion_workspace(self) -> Callable[ - [clouddms.ApplyConversionWorkspaceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def describe_database_entities(self) -> Callable[ - [clouddms.DescribeDatabaseEntitiesRequest], - Union[ - clouddms.DescribeDatabaseEntitiesResponse, - Awaitable[clouddms.DescribeDatabaseEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def search_background_jobs(self) -> Callable[ - [clouddms.SearchBackgroundJobsRequest], - Union[ - clouddms.SearchBackgroundJobsResponse, - 
Awaitable[clouddms.SearchBackgroundJobsResponse] - ]]: - raise NotImplementedError() - - @property - def describe_conversion_workspace_revisions(self) -> Callable[ - [clouddms.DescribeConversionWorkspaceRevisionsRequest], - Union[ - clouddms.DescribeConversionWorkspaceRevisionsResponse, - Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_static_ips(self) -> Callable[ - [clouddms.FetchStaticIpsRequest], - Union[ - clouddms.FetchStaticIpsResponse, - Awaitable[clouddms.FetchStaticIpsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise 
NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataMigrationServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py deleted file mode 100644 index bfc3d2e..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py +++ /dev/null @@ -1,1430 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO - - -class DataMigrationServiceGrpcTransport(DataMigrationServiceTransport): - """gRPC backend transport for DataMigrationService. - - Database Migration service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datamigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datamigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_migration_jobs(self) -> Callable[ - [clouddms.ListMigrationJobsRequest], - clouddms.ListMigrationJobsResponse]: - r"""Return a callable for the list migration jobs method over gRPC. - - Lists migration jobs in a given project and location. - - Returns: - Callable[[~.ListMigrationJobsRequest], - ~.ListMigrationJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_jobs' not in self._stubs: - self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', - request_serializer=clouddms.ListMigrationJobsRequest.serialize, - response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, - ) - return self._stubs['list_migration_jobs'] - - @property - def get_migration_job(self) -> Callable[ - [clouddms.GetMigrationJobRequest], - clouddms_resources.MigrationJob]: - r"""Return a callable for the get migration job method over gRPC. - - Gets details of a single migration job. - - Returns: - Callable[[~.GetMigrationJobRequest], - ~.MigrationJob]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_migration_job' not in self._stubs: - self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', - request_serializer=clouddms.GetMigrationJobRequest.serialize, - response_deserializer=clouddms_resources.MigrationJob.deserialize, - ) - return self._stubs['get_migration_job'] - - @property - def create_migration_job(self) -> Callable[ - [clouddms.CreateMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the create migration job method over gRPC. - - Creates a new migration job in a given project and - location. - - Returns: - Callable[[~.CreateMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_migration_job' not in self._stubs: - self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', - request_serializer=clouddms.CreateMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_migration_job'] - - @property - def update_migration_job(self) -> Callable[ - [clouddms.UpdateMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the update migration job method over gRPC. - - Updates the parameters of a single migration job. - - Returns: - Callable[[~.UpdateMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_migration_job' not in self._stubs: - self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', - request_serializer=clouddms.UpdateMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_migration_job'] - - @property - def delete_migration_job(self) -> Callable[ - [clouddms.DeleteMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete migration job method over gRPC. - - Deletes a single migration job. - - Returns: - Callable[[~.DeleteMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_migration_job' not in self._stubs: - self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', - request_serializer=clouddms.DeleteMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_migration_job'] - - @property - def start_migration_job(self) -> Callable[ - [clouddms.StartMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the start migration job method over gRPC. - - Start an already created migration job. - - Returns: - Callable[[~.StartMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_migration_job' not in self._stubs: - self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', - request_serializer=clouddms.StartMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_migration_job'] - - @property - def stop_migration_job(self) -> Callable[ - [clouddms.StopMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the stop migration job method over gRPC. - - Stops a running migration job. - - Returns: - Callable[[~.StopMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'stop_migration_job' not in self._stubs: - self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', - request_serializer=clouddms.StopMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_migration_job'] - - @property - def resume_migration_job(self) -> Callable[ - [clouddms.ResumeMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the resume migration job method over gRPC. - - Resume a migration job that is currently stopped and - is resumable (was stopped during CDC phase). - - Returns: - Callable[[~.ResumeMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resume_migration_job' not in self._stubs: - self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', - request_serializer=clouddms.ResumeMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['resume_migration_job'] - - @property - def promote_migration_job(self) -> Callable[ - [clouddms.PromoteMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the promote migration job method over gRPC. - - Promote a migration job, stopping replication to the - destination and promoting the destination to be a - standalone database. - - Returns: - Callable[[~.PromoteMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'promote_migration_job' not in self._stubs: - self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', - request_serializer=clouddms.PromoteMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['promote_migration_job'] - - @property - def verify_migration_job(self) -> Callable[ - [clouddms.VerifyMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the verify migration job method over gRPC. - - Verify a migration job, making sure the destination - can reach the source and that all configuration and - prerequisites are met. 
- - Returns: - Callable[[~.VerifyMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'verify_migration_job' not in self._stubs: - self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', - request_serializer=clouddms.VerifyMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['verify_migration_job'] - - @property - def restart_migration_job(self) -> Callable[ - [clouddms.RestartMigrationJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the restart migration job method over gRPC. - - Restart a stopped or failed migration job, resetting - the destination instance to its original state and - starting the migration process from scratch. - - Returns: - Callable[[~.RestartMigrationJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restart_migration_job' not in self._stubs: - self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', - request_serializer=clouddms.RestartMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restart_migration_job'] - - @property - def generate_ssh_script(self) -> Callable[ - [clouddms.GenerateSshScriptRequest], - clouddms.SshScript]: - r"""Return a callable for the generate ssh script method over gRPC. 
- - Generate a SSH configuration script to configure the - reverse SSH connectivity. - - Returns: - Callable[[~.GenerateSshScriptRequest], - ~.SshScript]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_ssh_script' not in self._stubs: - self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript', - request_serializer=clouddms.GenerateSshScriptRequest.serialize, - response_deserializer=clouddms.SshScript.deserialize, - ) - return self._stubs['generate_ssh_script'] - - @property - def list_connection_profiles(self) -> Callable[ - [clouddms.ListConnectionProfilesRequest], - clouddms.ListConnectionProfilesResponse]: - r"""Return a callable for the list connection profiles method over gRPC. - - Retrieves a list of all connection profiles in a - given project and location. - - Returns: - Callable[[~.ListConnectionProfilesRequest], - ~.ListConnectionProfilesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_connection_profiles' not in self._stubs: - self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', - request_serializer=clouddms.ListConnectionProfilesRequest.serialize, - response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, - ) - return self._stubs['list_connection_profiles'] - - @property - def get_connection_profile(self) -> Callable[ - [clouddms.GetConnectionProfileRequest], - clouddms_resources.ConnectionProfile]: - r"""Return a callable for the get connection profile method over gRPC. - - Gets details of a single connection profile. - - Returns: - Callable[[~.GetConnectionProfileRequest], - ~.ConnectionProfile]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_connection_profile' not in self._stubs: - self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', - request_serializer=clouddms.GetConnectionProfileRequest.serialize, - response_deserializer=clouddms_resources.ConnectionProfile.deserialize, - ) - return self._stubs['get_connection_profile'] - - @property - def create_connection_profile(self) -> Callable[ - [clouddms.CreateConnectionProfileRequest], - operations_pb2.Operation]: - r"""Return a callable for the create connection profile method over gRPC. - - Creates a new connection profile in a given project - and location. - - Returns: - Callable[[~.CreateConnectionProfileRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection_profile' not in self._stubs: - self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', - request_serializer=clouddms.CreateConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection_profile'] - - @property - def update_connection_profile(self) -> Callable[ - [clouddms.UpdateConnectionProfileRequest], - operations_pb2.Operation]: - r"""Return a callable for the update connection profile method over gRPC. - - Update the configuration of a single connection - profile. - - Returns: - Callable[[~.UpdateConnectionProfileRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_connection_profile' not in self._stubs: - self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', - request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection_profile'] - - @property - def delete_connection_profile(self) -> Callable[ - [clouddms.DeleteConnectionProfileRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete connection profile method over gRPC. - - Deletes a single Database Migration Service - connection profile. A connection profile can only be - deleted if it is not in use by any active migration - jobs. 
- - Returns: - Callable[[~.DeleteConnectionProfileRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection_profile' not in self._stubs: - self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', - request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection_profile'] - - @property - def create_private_connection(self) -> Callable[ - [clouddms.CreatePrivateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create private connection method over gRPC. - - Creates a new private connection in a given project - and location. - - Returns: - Callable[[~.CreatePrivateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_private_connection' not in self._stubs: - self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', - request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_private_connection'] - - @property - def get_private_connection(self) -> Callable[ - [clouddms.GetPrivateConnectionRequest], - clouddms_resources.PrivateConnection]: - r"""Return a callable for the get private connection method over gRPC. 
- - Gets details of a single private connection. - - Returns: - Callable[[~.GetPrivateConnectionRequest], - ~.PrivateConnection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_private_connection' not in self._stubs: - self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', - request_serializer=clouddms.GetPrivateConnectionRequest.serialize, - response_deserializer=clouddms_resources.PrivateConnection.deserialize, - ) - return self._stubs['get_private_connection'] - - @property - def list_private_connections(self) -> Callable[ - [clouddms.ListPrivateConnectionsRequest], - clouddms.ListPrivateConnectionsResponse]: - r"""Return a callable for the list private connections method over gRPC. - - Retrieves a list of private connections in a given - project and location. - - Returns: - Callable[[~.ListPrivateConnectionsRequest], - ~.ListPrivateConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_private_connections' not in self._stubs: - self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', - request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, - response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, - ) - return self._stubs['list_private_connections'] - - @property - def delete_private_connection(self) -> Callable[ - [clouddms.DeletePrivateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete private connection method over gRPC. - - Deletes a single Database Migration Service private - connection. - - Returns: - Callable[[~.DeletePrivateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_private_connection' not in self._stubs: - self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', - request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_private_connection'] - - @property - def get_conversion_workspace(self) -> Callable[ - [clouddms.GetConversionWorkspaceRequest], - conversionworkspace_resources.ConversionWorkspace]: - r"""Return a callable for the get conversion workspace method over gRPC. - - Gets details of a single conversion workspace. - - Returns: - Callable[[~.GetConversionWorkspaceRequest], - ~.ConversionWorkspace]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_conversion_workspace' not in self._stubs: - self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', - request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, - response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, - ) - return self._stubs['get_conversion_workspace'] - - @property - def list_conversion_workspaces(self) -> Callable[ - [clouddms.ListConversionWorkspacesRequest], - clouddms.ListConversionWorkspacesResponse]: - r"""Return a callable for the list conversion workspaces method over gRPC. - - Lists conversion workspaces in a given project and - location. - - Returns: - Callable[[~.ListConversionWorkspacesRequest], - ~.ListConversionWorkspacesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_conversion_workspaces' not in self._stubs: - self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', - request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, - response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, - ) - return self._stubs['list_conversion_workspaces'] - - @property - def create_conversion_workspace(self) -> Callable[ - [clouddms.CreateConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the create conversion workspace method over gRPC. - - Creates a new conversion workspace in a given project - and location. 
- - Returns: - Callable[[~.CreateConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_conversion_workspace' not in self._stubs: - self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', - request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_conversion_workspace'] - - @property - def update_conversion_workspace(self) -> Callable[ - [clouddms.UpdateConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update conversion workspace method over gRPC. - - Updates the parameters of a single conversion - workspace. - - Returns: - Callable[[~.UpdateConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_conversion_workspace' not in self._stubs: - self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', - request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_conversion_workspace'] - - @property - def delete_conversion_workspace(self) -> Callable[ - [clouddms.DeleteConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete conversion workspace method over gRPC. - - Deletes a single conversion workspace. - - Returns: - Callable[[~.DeleteConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_conversion_workspace' not in self._stubs: - self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', - request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_conversion_workspace'] - - @property - def seed_conversion_workspace(self) -> Callable[ - [clouddms.SeedConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the seed conversion workspace method over gRPC. - - Imports a snapshot of the source database into the - conversion workspace. - - Returns: - Callable[[~.SeedConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'seed_conversion_workspace' not in self._stubs: - self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', - request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['seed_conversion_workspace'] - - @property - def import_mapping_rules(self) -> Callable[ - [clouddms.ImportMappingRulesRequest], - operations_pb2.Operation]: - r"""Return a callable for the import mapping rules method over gRPC. - - Imports the mapping rules for a given conversion - workspace. Supports various formats of external rules - files. - - Returns: - Callable[[~.ImportMappingRulesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_mapping_rules' not in self._stubs: - self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', - request_serializer=clouddms.ImportMappingRulesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_mapping_rules'] - - @property - def convert_conversion_workspace(self) -> Callable[ - [clouddms.ConvertConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the convert conversion workspace method over gRPC. - - Creates a draft tree schema for the destination - database. - - Returns: - Callable[[~.ConvertConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'convert_conversion_workspace' not in self._stubs: - self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', - request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['convert_conversion_workspace'] - - @property - def commit_conversion_workspace(self) -> Callable[ - [clouddms.CommitConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the commit conversion workspace method over gRPC. - - Marks all the data in the conversion workspace as - committed. - - Returns: - Callable[[~.CommitConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit_conversion_workspace' not in self._stubs: - self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', - request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['commit_conversion_workspace'] - - @property - def rollback_conversion_workspace(self) -> Callable[ - [clouddms.RollbackConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the rollback conversion workspace method over gRPC. - - Rolls back a conversion workspace to the last - committed snapshot. 
- - Returns: - Callable[[~.RollbackConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback_conversion_workspace' not in self._stubs: - self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', - request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['rollback_conversion_workspace'] - - @property - def apply_conversion_workspace(self) -> Callable[ - [clouddms.ApplyConversionWorkspaceRequest], - operations_pb2.Operation]: - r"""Return a callable for the apply conversion workspace method over gRPC. - - Applies draft tree onto a specific destination - database. - - Returns: - Callable[[~.ApplyConversionWorkspaceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'apply_conversion_workspace' not in self._stubs: - self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', - request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['apply_conversion_workspace'] - - @property - def describe_database_entities(self) -> Callable[ - [clouddms.DescribeDatabaseEntitiesRequest], - clouddms.DescribeDatabaseEntitiesResponse]: - r"""Return a callable for the describe database entities method over gRPC. - - Describes the database entities tree for a specific - conversion workspace and a specific tree type. - - Database entities are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are simple data - objects describing the structure of the client database. - - Returns: - Callable[[~.DescribeDatabaseEntitiesRequest], - ~.DescribeDatabaseEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'describe_database_entities' not in self._stubs: - self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', - request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, - response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, - ) - return self._stubs['describe_database_entities'] - - @property - def search_background_jobs(self) -> Callable[ - [clouddms.SearchBackgroundJobsRequest], - clouddms.SearchBackgroundJobsResponse]: - r"""Return a callable for the search background jobs method over gRPC. 
- - Searches/lists the background jobs for a specific - conversion workspace. - - The background jobs are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are a way to expose - the data plane jobs log. - - Returns: - Callable[[~.SearchBackgroundJobsRequest], - ~.SearchBackgroundJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_background_jobs' not in self._stubs: - self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', - request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, - response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, - ) - return self._stubs['search_background_jobs'] - - @property - def describe_conversion_workspace_revisions(self) -> Callable[ - [clouddms.DescribeConversionWorkspaceRevisionsRequest], - clouddms.DescribeConversionWorkspaceRevisionsResponse]: - r"""Return a callable for the describe conversion workspace - revisions method over gRPC. - - Retrieves a list of committed revisions of a specific - conversion workspace. - - Returns: - Callable[[~.DescribeConversionWorkspaceRevisionsRequest], - ~.DescribeConversionWorkspaceRevisionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'describe_conversion_workspace_revisions' not in self._stubs: - self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', - request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, - response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, - ) - return self._stubs['describe_conversion_workspace_revisions'] - - @property - def fetch_static_ips(self) -> Callable[ - [clouddms.FetchStaticIpsRequest], - clouddms.FetchStaticIpsResponse]: - r"""Return a callable for the fetch static ips method over gRPC. - - Fetches a set of static IP addresses that need to be - allowlisted by the customer when using the static-IP - connectivity method. - - Returns: - Callable[[~.FetchStaticIpsRequest], - ~.FetchStaticIpsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_static_ips' not in self._stubs: - self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', - request_serializer=clouddms.FetchStaticIpsRequest.serialize, - response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, - ) - return self._stubs['fetch_static_ips'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataMigrationServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py deleted file mode 100644 index 0dce4cb..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1429 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import DataMigrationServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataMigrationServiceGrpcTransport - - -class DataMigrationServiceGrpcAsyncIOTransport(DataMigrationServiceTransport): - """gRPC AsyncIO backend transport for DataMigrationService. - - Database Migration service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datamigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datamigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_migration_jobs(self) -> Callable[ - [clouddms.ListMigrationJobsRequest], - Awaitable[clouddms.ListMigrationJobsResponse]]: - r"""Return a callable for the list migration jobs method over gRPC. - - Lists migration jobs in a given project and location. - - Returns: - Callable[[~.ListMigrationJobsRequest], - Awaitable[~.ListMigrationJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_jobs' not in self._stubs: - self._stubs['list_migration_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListMigrationJobs', - request_serializer=clouddms.ListMigrationJobsRequest.serialize, - response_deserializer=clouddms.ListMigrationJobsResponse.deserialize, - ) - return self._stubs['list_migration_jobs'] - - @property - def get_migration_job(self) -> Callable[ - [clouddms.GetMigrationJobRequest], - Awaitable[clouddms_resources.MigrationJob]]: - r"""Return a callable for the get migration job method over gRPC. - - Gets details of a single migration job. - - Returns: - Callable[[~.GetMigrationJobRequest], - Awaitable[~.MigrationJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_migration_job' not in self._stubs: - self._stubs['get_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetMigrationJob', - request_serializer=clouddms.GetMigrationJobRequest.serialize, - response_deserializer=clouddms_resources.MigrationJob.deserialize, - ) - return self._stubs['get_migration_job'] - - @property - def create_migration_job(self) -> Callable[ - [clouddms.CreateMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create migration job method over gRPC. - - Creates a new migration job in a given project and - location. - - Returns: - Callable[[~.CreateMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_migration_job' not in self._stubs: - self._stubs['create_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateMigrationJob', - request_serializer=clouddms.CreateMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_migration_job'] - - @property - def update_migration_job(self) -> Callable[ - [clouddms.UpdateMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update migration job method over gRPC. - - Updates the parameters of a single migration job. - - Returns: - Callable[[~.UpdateMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_migration_job' not in self._stubs: - self._stubs['update_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateMigrationJob', - request_serializer=clouddms.UpdateMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_migration_job'] - - @property - def delete_migration_job(self) -> Callable[ - [clouddms.DeleteMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete migration job method over gRPC. - - Deletes a single migration job. - - Returns: - Callable[[~.DeleteMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_migration_job' not in self._stubs: - self._stubs['delete_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteMigrationJob', - request_serializer=clouddms.DeleteMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_migration_job'] - - @property - def start_migration_job(self) -> Callable[ - [clouddms.StartMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the start migration job method over gRPC. - - Start an already created migration job. - - Returns: - Callable[[~.StartMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'start_migration_job' not in self._stubs: - self._stubs['start_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/StartMigrationJob', - request_serializer=clouddms.StartMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_migration_job'] - - @property - def stop_migration_job(self) -> Callable[ - [clouddms.StopMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the stop migration job method over gRPC. - - Stops a running migration job. - - Returns: - Callable[[~.StopMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'stop_migration_job' not in self._stubs: - self._stubs['stop_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/StopMigrationJob', - request_serializer=clouddms.StopMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_migration_job'] - - @property - def resume_migration_job(self) -> Callable[ - [clouddms.ResumeMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the resume migration job method over gRPC. - - Resume a migration job that is currently stopped and - is resumable (was stopped during CDC phase). - - Returns: - Callable[[~.ResumeMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'resume_migration_job' not in self._stubs: - self._stubs['resume_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ResumeMigrationJob', - request_serializer=clouddms.ResumeMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['resume_migration_job'] - - @property - def promote_migration_job(self) -> Callable[ - [clouddms.PromoteMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the promote migration job method over gRPC. - - Promote a migration job, stopping replication to the - destination and promoting the destination to be a - standalone database. - - Returns: - Callable[[~.PromoteMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'promote_migration_job' not in self._stubs: - self._stubs['promote_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/PromoteMigrationJob', - request_serializer=clouddms.PromoteMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['promote_migration_job'] - - @property - def verify_migration_job(self) -> Callable[ - [clouddms.VerifyMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the verify migration job method over gRPC. - - Verify a migration job, making sure the destination - can reach the source and that all configuration and - prerequisites are met. - - Returns: - Callable[[~.VerifyMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'verify_migration_job' not in self._stubs: - self._stubs['verify_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/VerifyMigrationJob', - request_serializer=clouddms.VerifyMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['verify_migration_job'] - - @property - def restart_migration_job(self) -> Callable[ - [clouddms.RestartMigrationJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restart migration job method over gRPC. - - Restart a stopped or failed migration job, resetting - the destination instance to its original state and - starting the migration process from scratch. - - Returns: - Callable[[~.RestartMigrationJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restart_migration_job' not in self._stubs: - self._stubs['restart_migration_job'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/RestartMigrationJob', - request_serializer=clouddms.RestartMigrationJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restart_migration_job'] - - @property - def generate_ssh_script(self) -> Callable[ - [clouddms.GenerateSshScriptRequest], - Awaitable[clouddms.SshScript]]: - r"""Return a callable for the generate ssh script method over gRPC. - - Generate a SSH configuration script to configure the - reverse SSH connectivity. 
- - Returns: - Callable[[~.GenerateSshScriptRequest], - Awaitable[~.SshScript]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_ssh_script' not in self._stubs: - self._stubs['generate_ssh_script'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GenerateSshScript', - request_serializer=clouddms.GenerateSshScriptRequest.serialize, - response_deserializer=clouddms.SshScript.deserialize, - ) - return self._stubs['generate_ssh_script'] - - @property - def list_connection_profiles(self) -> Callable[ - [clouddms.ListConnectionProfilesRequest], - Awaitable[clouddms.ListConnectionProfilesResponse]]: - r"""Return a callable for the list connection profiles method over gRPC. - - Retrieves a list of all connection profiles in a - given project and location. - - Returns: - Callable[[~.ListConnectionProfilesRequest], - Awaitable[~.ListConnectionProfilesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_connection_profiles' not in self._stubs: - self._stubs['list_connection_profiles'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListConnectionProfiles', - request_serializer=clouddms.ListConnectionProfilesRequest.serialize, - response_deserializer=clouddms.ListConnectionProfilesResponse.deserialize, - ) - return self._stubs['list_connection_profiles'] - - @property - def get_connection_profile(self) -> Callable[ - [clouddms.GetConnectionProfileRequest], - Awaitable[clouddms_resources.ConnectionProfile]]: - r"""Return a callable for the get connection profile method over gRPC. - - Gets details of a single connection profile. - - Returns: - Callable[[~.GetConnectionProfileRequest], - Awaitable[~.ConnectionProfile]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_connection_profile' not in self._stubs: - self._stubs['get_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetConnectionProfile', - request_serializer=clouddms.GetConnectionProfileRequest.serialize, - response_deserializer=clouddms_resources.ConnectionProfile.deserialize, - ) - return self._stubs['get_connection_profile'] - - @property - def create_connection_profile(self) -> Callable[ - [clouddms.CreateConnectionProfileRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create connection profile method over gRPC. - - Creates a new connection profile in a given project - and location. - - Returns: - Callable[[~.CreateConnectionProfileRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection_profile' not in self._stubs: - self._stubs['create_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateConnectionProfile', - request_serializer=clouddms.CreateConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection_profile'] - - @property - def update_connection_profile(self) -> Callable[ - [clouddms.UpdateConnectionProfileRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update connection profile method over gRPC. - - Update the configuration of a single connection - profile. - - Returns: - Callable[[~.UpdateConnectionProfileRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_connection_profile' not in self._stubs: - self._stubs['update_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateConnectionProfile', - request_serializer=clouddms.UpdateConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection_profile'] - - @property - def delete_connection_profile(self) -> Callable[ - [clouddms.DeleteConnectionProfileRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete connection profile method over gRPC. - - Deletes a single Database Migration Service - connection profile. A connection profile can only be - deleted if it is not in use by any active migration - jobs. 
- - Returns: - Callable[[~.DeleteConnectionProfileRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection_profile' not in self._stubs: - self._stubs['delete_connection_profile'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteConnectionProfile', - request_serializer=clouddms.DeleteConnectionProfileRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection_profile'] - - @property - def create_private_connection(self) -> Callable[ - [clouddms.CreatePrivateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create private connection method over gRPC. - - Creates a new private connection in a given project - and location. - - Returns: - Callable[[~.CreatePrivateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_private_connection' not in self._stubs: - self._stubs['create_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection', - request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_private_connection'] - - @property - def get_private_connection(self) -> Callable[ - [clouddms.GetPrivateConnectionRequest], - Awaitable[clouddms_resources.PrivateConnection]]: - r"""Return a callable for the get private connection method over gRPC. - - Gets details of a single private connection. - - Returns: - Callable[[~.GetPrivateConnectionRequest], - Awaitable[~.PrivateConnection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_private_connection' not in self._stubs: - self._stubs['get_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection', - request_serializer=clouddms.GetPrivateConnectionRequest.serialize, - response_deserializer=clouddms_resources.PrivateConnection.deserialize, - ) - return self._stubs['get_private_connection'] - - @property - def list_private_connections(self) -> Callable[ - [clouddms.ListPrivateConnectionsRequest], - Awaitable[clouddms.ListPrivateConnectionsResponse]]: - r"""Return a callable for the list private connections method over gRPC. - - Retrieves a list of private connections in a given - project and location. - - Returns: - Callable[[~.ListPrivateConnectionsRequest], - Awaitable[~.ListPrivateConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_private_connections' not in self._stubs: - self._stubs['list_private_connections'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections', - request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, - response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, - ) - return self._stubs['list_private_connections'] - - @property - def delete_private_connection(self) -> Callable[ - [clouddms.DeletePrivateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete private connection method over gRPC. - - Deletes a single Database Migration Service private - connection. - - Returns: - Callable[[~.DeletePrivateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_private_connection' not in self._stubs: - self._stubs['delete_private_connection'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection', - request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_private_connection'] - - @property - def get_conversion_workspace(self) -> Callable[ - [clouddms.GetConversionWorkspaceRequest], - Awaitable[conversionworkspace_resources.ConversionWorkspace]]: - r"""Return a callable for the get conversion workspace method over gRPC. - - Gets details of a single conversion workspace. 
- - Returns: - Callable[[~.GetConversionWorkspaceRequest], - Awaitable[~.ConversionWorkspace]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_conversion_workspace' not in self._stubs: - self._stubs['get_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace', - request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, - response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, - ) - return self._stubs['get_conversion_workspace'] - - @property - def list_conversion_workspaces(self) -> Callable[ - [clouddms.ListConversionWorkspacesRequest], - Awaitable[clouddms.ListConversionWorkspacesResponse]]: - r"""Return a callable for the list conversion workspaces method over gRPC. - - Lists conversion workspaces in a given project and - location. - - Returns: - Callable[[~.ListConversionWorkspacesRequest], - Awaitable[~.ListConversionWorkspacesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_conversion_workspaces' not in self._stubs: - self._stubs['list_conversion_workspaces'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces', - request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, - response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, - ) - return self._stubs['list_conversion_workspaces'] - - @property - def create_conversion_workspace(self) -> Callable[ - [clouddms.CreateConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create conversion workspace method over gRPC. - - Creates a new conversion workspace in a given project - and location. - - Returns: - Callable[[~.CreateConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_conversion_workspace' not in self._stubs: - self._stubs['create_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace', - request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_conversion_workspace'] - - @property - def update_conversion_workspace(self) -> Callable[ - [clouddms.UpdateConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update conversion workspace method over gRPC. - - Updates the parameters of a single conversion - workspace. - - Returns: - Callable[[~.UpdateConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_conversion_workspace' not in self._stubs: - self._stubs['update_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace', - request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_conversion_workspace'] - - @property - def delete_conversion_workspace(self) -> Callable[ - [clouddms.DeleteConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete conversion workspace method over gRPC. - - Deletes a single conversion workspace. - - Returns: - Callable[[~.DeleteConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_conversion_workspace' not in self._stubs: - self._stubs['delete_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace', - request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_conversion_workspace'] - - @property - def seed_conversion_workspace(self) -> Callable[ - [clouddms.SeedConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the seed conversion workspace method over gRPC. - - Imports a snapshot of the source database into the - conversion workspace. 
- - Returns: - Callable[[~.SeedConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'seed_conversion_workspace' not in self._stubs: - self._stubs['seed_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace', - request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['seed_conversion_workspace'] - - @property - def import_mapping_rules(self) -> Callable[ - [clouddms.ImportMappingRulesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the import mapping rules method over gRPC. - - Imports the mapping rules for a given conversion - workspace. Supports various formats of external rules - files. - - Returns: - Callable[[~.ImportMappingRulesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'import_mapping_rules' not in self._stubs: - self._stubs['import_mapping_rules'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules', - request_serializer=clouddms.ImportMappingRulesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_mapping_rules'] - - @property - def convert_conversion_workspace(self) -> Callable[ - [clouddms.ConvertConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the convert conversion workspace method over gRPC. - - Creates a draft tree schema for the destination - database. - - Returns: - Callable[[~.ConvertConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'convert_conversion_workspace' not in self._stubs: - self._stubs['convert_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace', - request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['convert_conversion_workspace'] - - @property - def commit_conversion_workspace(self) -> Callable[ - [clouddms.CommitConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the commit conversion workspace method over gRPC. - - Marks all the data in the conversion workspace as - committed. - - Returns: - Callable[[~.CommitConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit_conversion_workspace' not in self._stubs: - self._stubs['commit_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace', - request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['commit_conversion_workspace'] - - @property - def rollback_conversion_workspace(self) -> Callable[ - [clouddms.RollbackConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the rollback conversion workspace method over gRPC. - - Rolls back a conversion workspace to the last - committed snapshot. - - Returns: - Callable[[~.RollbackConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback_conversion_workspace' not in self._stubs: - self._stubs['rollback_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace', - request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['rollback_conversion_workspace'] - - @property - def apply_conversion_workspace(self) -> Callable[ - [clouddms.ApplyConversionWorkspaceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the apply conversion workspace method over gRPC. - - Applies draft tree onto a specific destination - database. 
- - Returns: - Callable[[~.ApplyConversionWorkspaceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'apply_conversion_workspace' not in self._stubs: - self._stubs['apply_conversion_workspace'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace', - request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['apply_conversion_workspace'] - - @property - def describe_database_entities(self) -> Callable[ - [clouddms.DescribeDatabaseEntitiesRequest], - Awaitable[clouddms.DescribeDatabaseEntitiesResponse]]: - r"""Return a callable for the describe database entities method over gRPC. - - Describes the database entities tree for a specific - conversion workspace and a specific tree type. - - Database entities are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are simple data - objects describing the structure of the client database. - - Returns: - Callable[[~.DescribeDatabaseEntitiesRequest], - Awaitable[~.DescribeDatabaseEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'describe_database_entities' not in self._stubs: - self._stubs['describe_database_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities', - request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, - response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, - ) - return self._stubs['describe_database_entities'] - - @property - def search_background_jobs(self) -> Callable[ - [clouddms.SearchBackgroundJobsRequest], - Awaitable[clouddms.SearchBackgroundJobsResponse]]: - r"""Return a callable for the search background jobs method over gRPC. - - Searches/lists the background jobs for a specific - conversion workspace. - - The background jobs are not resources like conversion - workspaces or mapping rules, and they can't be created, - updated or deleted. Instead, they are a way to expose - the data plane jobs log. - - Returns: - Callable[[~.SearchBackgroundJobsRequest], - Awaitable[~.SearchBackgroundJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_background_jobs' not in self._stubs: - self._stubs['search_background_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs', - request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, - response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, - ) - return self._stubs['search_background_jobs'] - - @property - def describe_conversion_workspace_revisions(self) -> Callable[ - [clouddms.DescribeConversionWorkspaceRevisionsRequest], - Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse]]: - r"""Return a callable for the describe conversion workspace - revisions method over gRPC. 
- - Retrieves a list of committed revisions of a specific - conversion workspace. - - Returns: - Callable[[~.DescribeConversionWorkspaceRevisionsRequest], - Awaitable[~.DescribeConversionWorkspaceRevisionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'describe_conversion_workspace_revisions' not in self._stubs: - self._stubs['describe_conversion_workspace_revisions'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions', - request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, - response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, - ) - return self._stubs['describe_conversion_workspace_revisions'] - - @property - def fetch_static_ips(self) -> Callable[ - [clouddms.FetchStaticIpsRequest], - Awaitable[clouddms.FetchStaticIpsResponse]]: - r"""Return a callable for the fetch static ips method over gRPC. - - Fetches a set of static IP addresses that need to be - allowlisted by the customer when using the static-IP - connectivity method. - - Returns: - Callable[[~.FetchStaticIpsRequest], - Awaitable[~.FetchStaticIpsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'fetch_static_ips' not in self._stubs: - self._stubs['fetch_static_ips'] = self.grpc_channel.unary_unary( - '/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps', - request_serializer=clouddms.FetchStaticIpsRequest.serialize, - response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, - ) - return self._stubs['fetch_static_ips'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'DataMigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py deleted file mode 100644 index 62abe85..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/__init__.py +++ /dev/null @@ -1,216 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .clouddms import ( - ApplyConversionWorkspaceRequest, - CommitConversionWorkspaceRequest, - ConvertConversionWorkspaceRequest, - CreateConnectionProfileRequest, - CreateConversionWorkspaceRequest, - CreateMigrationJobRequest, - CreatePrivateConnectionRequest, - DeleteConnectionProfileRequest, - DeleteConversionWorkspaceRequest, - DeleteMigrationJobRequest, - DeletePrivateConnectionRequest, - DescribeConversionWorkspaceRevisionsRequest, - DescribeConversionWorkspaceRevisionsResponse, - DescribeDatabaseEntitiesRequest, - DescribeDatabaseEntitiesResponse, - FetchStaticIpsRequest, - FetchStaticIpsResponse, - GenerateSshScriptRequest, - GetConnectionProfileRequest, - GetConversionWorkspaceRequest, - GetMigrationJobRequest, - GetPrivateConnectionRequest, - ImportMappingRulesRequest, - ListConnectionProfilesRequest, - ListConnectionProfilesResponse, - ListConversionWorkspacesRequest, - ListConversionWorkspacesResponse, - ListMigrationJobsRequest, - ListMigrationJobsResponse, - ListPrivateConnectionsRequest, - ListPrivateConnectionsResponse, - OperationMetadata, - PromoteMigrationJobRequest, - RestartMigrationJobRequest, - ResumeMigrationJobRequest, - RollbackConversionWorkspaceRequest, - SearchBackgroundJobsRequest, - SearchBackgroundJobsResponse, - SeedConversionWorkspaceRequest, - SshScript, - StartMigrationJobRequest, - StopMigrationJobRequest, - UpdateConnectionProfileRequest, - UpdateConversionWorkspaceRequest, - UpdateMigrationJobRequest, - VerifyMigrationJobRequest, - VmCreationConfig, - VmSelectionConfig, -) -from .clouddms_resources import ( - AlloyDbConnectionProfile, - AlloyDbSettings, - CloudSqlConnectionProfile, - CloudSqlSettings, - ConnectionProfile, - ConversionWorkspaceInfo, - DatabaseType, - ForwardSshTunnelConnectivity, - MigrationJob, - MigrationJobVerificationError, - MySqlConnectionProfile, - OracleConnectionProfile, - PostgreSqlConnectionProfile, - PrivateConnection, - PrivateConnectivity, - PrivateServiceConnectConnectivity, - 
ReverseSshConnectivity, - SqlAclEntry, - SqlIpConfig, - SslConfig, - StaticIpConnectivity, - StaticServiceIpConnectivity, - VpcPeeringConfig, - VpcPeeringConnectivity, - DatabaseEngine, - DatabaseProvider, - NetworkArchitecture, -) -from .conversionworkspace_resources import ( - BackgroundJobLogEntry, - ColumnEntity, - ConstraintEntity, - ConversionWorkspace, - DatabaseEngineInfo, - DatabaseEntity, - EntityMapping, - EntityMappingLogEntry, - FunctionEntity, - IndexEntity, - PackageEntity, - SchemaEntity, - SequenceEntity, - StoredProcedureEntity, - SynonymEntity, - TableEntity, - TriggerEntity, - ViewEntity, - BackgroundJobType, - DatabaseEntityType, - ImportRulesFileFormat, -) - -__all__ = ( - 'ApplyConversionWorkspaceRequest', - 'CommitConversionWorkspaceRequest', - 'ConvertConversionWorkspaceRequest', - 'CreateConnectionProfileRequest', - 'CreateConversionWorkspaceRequest', - 'CreateMigrationJobRequest', - 'CreatePrivateConnectionRequest', - 'DeleteConnectionProfileRequest', - 'DeleteConversionWorkspaceRequest', - 'DeleteMigrationJobRequest', - 'DeletePrivateConnectionRequest', - 'DescribeConversionWorkspaceRevisionsRequest', - 'DescribeConversionWorkspaceRevisionsResponse', - 'DescribeDatabaseEntitiesRequest', - 'DescribeDatabaseEntitiesResponse', - 'FetchStaticIpsRequest', - 'FetchStaticIpsResponse', - 'GenerateSshScriptRequest', - 'GetConnectionProfileRequest', - 'GetConversionWorkspaceRequest', - 'GetMigrationJobRequest', - 'GetPrivateConnectionRequest', - 'ImportMappingRulesRequest', - 'ListConnectionProfilesRequest', - 'ListConnectionProfilesResponse', - 'ListConversionWorkspacesRequest', - 'ListConversionWorkspacesResponse', - 'ListMigrationJobsRequest', - 'ListMigrationJobsResponse', - 'ListPrivateConnectionsRequest', - 'ListPrivateConnectionsResponse', - 'OperationMetadata', - 'PromoteMigrationJobRequest', - 'RestartMigrationJobRequest', - 'ResumeMigrationJobRequest', - 'RollbackConversionWorkspaceRequest', - 'SearchBackgroundJobsRequest', - 
'SearchBackgroundJobsResponse', - 'SeedConversionWorkspaceRequest', - 'SshScript', - 'StartMigrationJobRequest', - 'StopMigrationJobRequest', - 'UpdateConnectionProfileRequest', - 'UpdateConversionWorkspaceRequest', - 'UpdateMigrationJobRequest', - 'VerifyMigrationJobRequest', - 'VmCreationConfig', - 'VmSelectionConfig', - 'AlloyDbConnectionProfile', - 'AlloyDbSettings', - 'CloudSqlConnectionProfile', - 'CloudSqlSettings', - 'ConnectionProfile', - 'ConversionWorkspaceInfo', - 'DatabaseType', - 'ForwardSshTunnelConnectivity', - 'MigrationJob', - 'MigrationJobVerificationError', - 'MySqlConnectionProfile', - 'OracleConnectionProfile', - 'PostgreSqlConnectionProfile', - 'PrivateConnection', - 'PrivateConnectivity', - 'PrivateServiceConnectConnectivity', - 'ReverseSshConnectivity', - 'SqlAclEntry', - 'SqlIpConfig', - 'SslConfig', - 'StaticIpConnectivity', - 'StaticServiceIpConnectivity', - 'VpcPeeringConfig', - 'VpcPeeringConnectivity', - 'DatabaseEngine', - 'DatabaseProvider', - 'NetworkArchitecture', - 'BackgroundJobLogEntry', - 'ColumnEntity', - 'ConstraintEntity', - 'ConversionWorkspace', - 'DatabaseEngineInfo', - 'DatabaseEntity', - 'EntityMapping', - 'EntityMappingLogEntry', - 'FunctionEntity', - 'IndexEntity', - 'PackageEntity', - 'SchemaEntity', - 'SequenceEntity', - 'StoredProcedureEntity', - 'SynonymEntity', - 'TableEntity', - 'TriggerEntity', - 'ViewEntity', - 'BackgroundJobType', - 'DatabaseEntityType', - 'ImportRulesFileFormat', -) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py deleted file mode 100644 index 0c296cb..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms.py +++ /dev/null @@ -1,1718 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.clouddms.v1', - manifest={ - 'ListMigrationJobsRequest', - 'ListMigrationJobsResponse', - 'GetMigrationJobRequest', - 'CreateMigrationJobRequest', - 'UpdateMigrationJobRequest', - 'DeleteMigrationJobRequest', - 'StartMigrationJobRequest', - 'StopMigrationJobRequest', - 'ResumeMigrationJobRequest', - 'PromoteMigrationJobRequest', - 'VerifyMigrationJobRequest', - 'RestartMigrationJobRequest', - 'GenerateSshScriptRequest', - 'VmCreationConfig', - 'VmSelectionConfig', - 'SshScript', - 'ListConnectionProfilesRequest', - 'ListConnectionProfilesResponse', - 'GetConnectionProfileRequest', - 'CreateConnectionProfileRequest', - 'UpdateConnectionProfileRequest', - 'DeleteConnectionProfileRequest', - 'CreatePrivateConnectionRequest', - 'ListPrivateConnectionsRequest', - 'ListPrivateConnectionsResponse', - 'DeletePrivateConnectionRequest', - 'GetPrivateConnectionRequest', - 'OperationMetadata', - 'ListConversionWorkspacesRequest', - 'ListConversionWorkspacesResponse', - 'GetConversionWorkspaceRequest', - 'CreateConversionWorkspaceRequest', - 'UpdateConversionWorkspaceRequest', - 'DeleteConversionWorkspaceRequest', - 
'CommitConversionWorkspaceRequest', - 'RollbackConversionWorkspaceRequest', - 'ApplyConversionWorkspaceRequest', - 'SeedConversionWorkspaceRequest', - 'ConvertConversionWorkspaceRequest', - 'ImportMappingRulesRequest', - 'DescribeDatabaseEntitiesRequest', - 'DescribeDatabaseEntitiesResponse', - 'SearchBackgroundJobsRequest', - 'SearchBackgroundJobsResponse', - 'DescribeConversionWorkspaceRevisionsRequest', - 'DescribeConversionWorkspaceRevisionsResponse', - 'FetchStaticIpsRequest', - 'FetchStaticIpsResponse', - }, -) - - -class ListMigrationJobsRequest(proto.Message): - r"""Retrieves a list of all migration jobs in a given project and - location. - - Attributes: - parent (str): - Required. The parent which owns this - collection of migrationJobs. - page_size (int): - The maximum number of migration jobs to - return. The service may return fewer than this - value. If unspecified, at most 50 migration jobs - will be returned. The maximum value is 1000; - values above 1000 are coerced to 1000. - page_token (str): - The nextPageToken value received in the - previous call to migrationJobs.list, used in the - subsequent request to retrieve the next page of - results. On first call this should be left - blank. When paginating, all other parameters - provided to migrationJobs.list must match the - call that provided the page token. - filter (str): - A filter expression that filters migration jobs listed in - the response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either =, !=, >, or - <. For example, list migration jobs created this year by - specifying **createTime %gt; - 2020-01-01T00:00:00.000000000Z.** You can also filter nested - fields. For example, you could specify - **reverseSshConnectivity.vmIp = "1.2.3.4"** to select all - migration jobs connecting through the specific SSH tunnel - bastion. 
- order_by (str): - Sort the results based on the migration job - name. Valid values are: "name", "name asc", and - "name desc". - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListMigrationJobsResponse(proto.Message): - r"""Response message for 'ListMigrationJobs' request. - - Attributes: - migration_jobs (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob]): - The list of migration jobs objects. - next_page_token (str): - A token which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - migration_jobs: MutableSequence[clouddms_resources.MigrationJob] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=clouddms_resources.MigrationJob, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetMigrationJobRequest(proto.Message): - r"""Request message for 'GetMigrationJob' request. - - Attributes: - name (str): - Required. Name of the migration job resource - to get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateMigrationJobRequest(proto.Message): - r"""Request message to create a new Database Migration Service - migration job in the specified project and region. - - Attributes: - parent (str): - Required. The parent which owns this - collection of migration jobs. - migration_job_id (str): - Required. The ID of the instance to create. - migration_job (google.cloud.clouddms_v1.types.MigrationJob): - Required. 
Represents a `migration - job `__ - object. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - migration_job_id: str = proto.Field( - proto.STRING, - number=2, - ) - migration_job: clouddms_resources.MigrationJob = proto.Field( - proto.MESSAGE, - number=3, - message=clouddms_resources.MigrationJob, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateMigrationJobRequest(proto.Message): - r"""Request message for 'UpdateMigrationJob' request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the - fields to be overwritten by the update in the - conversion workspace resource. - migration_job (google.cloud.clouddms_v1.types.MigrationJob): - Required. The migration job parameters to - update. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - migration_job: clouddms_resources.MigrationJob = proto.Field( - proto.MESSAGE, - number=2, - message=clouddms_resources.MigrationJob, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteMigrationJobRequest(proto.Message): - r"""Request message for 'DeleteMigrationJob' request. 
- - Attributes: - name (str): - Required. Name of the migration job resource - to delete. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - force (bool): - The destination CloudSQL connection profile - is always deleted with the migration job. In - case of force delete, the destination CloudSQL - replica database is also deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - force: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class StartMigrationJobRequest(proto.Message): - r"""Request message for 'StartMigrationJob' request. - - Attributes: - name (str): - Name of the migration job resource to start. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StopMigrationJobRequest(proto.Message): - r"""Request message for 'StopMigrationJob' request. - - Attributes: - name (str): - Name of the migration job resource to stop. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ResumeMigrationJobRequest(proto.Message): - r"""Request message for 'ResumeMigrationJob' request. - - Attributes: - name (str): - Name of the migration job resource to resume. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PromoteMigrationJobRequest(proto.Message): - r"""Request message for 'PromoteMigrationJob' request. - - Attributes: - name (str): - Name of the migration job resource to - promote. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class VerifyMigrationJobRequest(proto.Message): - r"""Request message for 'VerifyMigrationJob' request. 
- - Attributes: - name (str): - Name of the migration job resource to verify. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RestartMigrationJobRequest(proto.Message): - r"""Request message for 'RestartMigrationJob' request. - - Attributes: - name (str): - Name of the migration job resource to - restart. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GenerateSshScriptRequest(proto.Message): - r"""Request message for 'GenerateSshScript' request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - migration_job (str): - Name of the migration job resource to - generate the SSH script. - vm (str): - Required. Bastion VM Instance name to use or - to create. - vm_creation_config (google.cloud.clouddms_v1.types.VmCreationConfig): - The VM creation configuration - - This field is a member of `oneof`_ ``vm_config``. - vm_selection_config (google.cloud.clouddms_v1.types.VmSelectionConfig): - The VM selection configuration - - This field is a member of `oneof`_ ``vm_config``. - vm_port (int): - The port that will be open on the bastion - host. 
- """ - - migration_job: str = proto.Field( - proto.STRING, - number=1, - ) - vm: str = proto.Field( - proto.STRING, - number=2, - ) - vm_creation_config: 'VmCreationConfig' = proto.Field( - proto.MESSAGE, - number=100, - oneof='vm_config', - message='VmCreationConfig', - ) - vm_selection_config: 'VmSelectionConfig' = proto.Field( - proto.MESSAGE, - number=101, - oneof='vm_config', - message='VmSelectionConfig', - ) - vm_port: int = proto.Field( - proto.INT32, - number=3, - ) - - -class VmCreationConfig(proto.Message): - r"""VM creation configuration message - - Attributes: - vm_machine_type (str): - Required. VM instance machine type to create. - vm_zone (str): - The Google Cloud Platform zone to create the - VM in. - subnet (str): - The subnet name the vm needs to be created - in. - """ - - vm_machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - vm_zone: str = proto.Field( - proto.STRING, - number=2, - ) - subnet: str = proto.Field( - proto.STRING, - number=3, - ) - - -class VmSelectionConfig(proto.Message): - r"""VM selection configuration message - - Attributes: - vm_zone (str): - Required. The Google Cloud Platform zone the - VM is located. - """ - - vm_zone: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SshScript(proto.Message): - r"""Response message for 'GenerateSshScript' request. - - Attributes: - script (str): - The ssh configuration script. - """ - - script: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListConnectionProfilesRequest(proto.Message): - r"""Request message for 'ListConnectionProfiles' request. - - Attributes: - parent (str): - Required. The parent which owns this - collection of connection profiles. - page_size (int): - The maximum number of connection profiles to - return. The service may return fewer than this - value. If unspecified, at most 50 connection - profiles will be returned. The maximum value is - 1000; values above 1000 are coerced to 1000. 
- page_token (str): - A page token, received from a previous - ``ListConnectionProfiles`` call. Provide this to retrieve - the subsequent page. - - When paginating, all other parameters provided to - ``ListConnectionProfiles`` must match the call that provided - the page token. - filter (str): - A filter expression that filters connection profiles listed - in the response. The expression must specify the field name, - a comparison operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either =, !=, >, or - <. For example, list connection profiles created this year - by specifying **createTime %gt; - 2020-01-01T00:00:00.000000000Z**. You can also filter nested - fields. For example, you could specify **mySql.username = - %lt;my_username%gt;** to list all connection profiles - configured to connect with a specific username. - order_by (str): - A comma-separated list of fields to order - results according to. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListConnectionProfilesResponse(proto.Message): - r"""Response message for 'ListConnectionProfiles' request. - - Attributes: - connection_profiles (MutableSequence[google.cloud.clouddms_v1.types.ConnectionProfile]): - The response list of connection profiles. - next_page_token (str): - A token which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
- """ - - @property - def raw_page(self): - return self - - connection_profiles: MutableSequence[clouddms_resources.ConnectionProfile] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=clouddms_resources.ConnectionProfile, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetConnectionProfileRequest(proto.Message): - r"""Request message for 'GetConnectionProfile' request. - - Attributes: - name (str): - Required. Name of the connection profile - resource to get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateConnectionProfileRequest(proto.Message): - r"""Request message for 'CreateConnectionProfile' request. - - Attributes: - parent (str): - Required. The parent which owns this - collection of connection profiles. - connection_profile_id (str): - Required. The connection profile identifier. - connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): - Required. The create request body including - the connection profile data - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two requests with the same ID, then the - second request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - validate_only (bool): - Optional. Only validate the connection - profile, but don't create any resources. The - default is false. Only supported for Oracle - connection profiles. - skip_validation (bool): - Optional. Create the connection profile - without validating it. The default is false. - Only supported for Oracle connection profiles. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - connection_profile_id: str = proto.Field( - proto.STRING, - number=2, - ) - connection_profile: clouddms_resources.ConnectionProfile = proto.Field( - proto.MESSAGE, - number=3, - message=clouddms_resources.ConnectionProfile, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - skip_validation: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class UpdateConnectionProfileRequest(proto.Message): - r"""Request message for 'UpdateConnectionProfile' request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the - fields to be overwritten by the update in the - conversion workspace resource. - connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile): - Required. The connection profile parameters - to update. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two requests with the same ID, then the - second request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - validate_only (bool): - Optional. Only validate the connection - profile, but don't update any resources. The - default is false. Only supported for Oracle - connection profiles. - skip_validation (bool): - Optional. Update the connection profile - without validating it. The default is false. - Only supported for Oracle connection profiles. 
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - connection_profile: clouddms_resources.ConnectionProfile = proto.Field( - proto.MESSAGE, - number=2, - message=clouddms_resources.ConnectionProfile, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - skip_validation: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DeleteConnectionProfileRequest(proto.Message): - r"""Request message for 'DeleteConnectionProfile' request. - - Attributes: - name (str): - Required. Name of the connection profile - resource to delete. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - force (bool): - In case of force delete, the CloudSQL replica - database is also deleted (only for CloudSQL - connection profile). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - force: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class CreatePrivateConnectionRequest(proto.Message): - r"""Request message to create a new private connection in the - specified project and region. - - Attributes: - parent (str): - Required. The parent that owns the collection - of PrivateConnections. - private_connection_id (str): - Required. The private connection identifier. - private_connection (google.cloud.clouddms_v1.types.PrivateConnection): - Required. The private connection resource to - create. - request_id (str): - Optional. A unique ID used to identify the request. 
If the - server receives two requests with the same ID, then the - second request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - skip_validation (bool): - Optional. If set to true, will skip - validations. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - private_connection_id: str = proto.Field( - proto.STRING, - number=2, - ) - private_connection: clouddms_resources.PrivateConnection = proto.Field( - proto.MESSAGE, - number=3, - message=clouddms_resources.PrivateConnection, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - skip_validation: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class ListPrivateConnectionsRequest(proto.Message): - r"""Request message to retrieve a list of private connections in - a given project and location. - - Attributes: - parent (str): - Required. The parent that owns the collection - of private connections. - page_size (int): - Maximum number of private connections to - return. If unspecified, at most 50 private - connections that are returned. The maximum value - is 1000; values above 1000 are coerced to 1000. - page_token (str): - Page token received from a previous - ``ListPrivateConnections`` call. Provide this to retrieve - the subsequent page. - - When paginating, all other parameters provided to - ``ListPrivateConnections`` must match the call that provided - the page token. - filter (str): - A filter expression that filters private connections listed - in the response. The expression must specify the field name, - a comparison operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either =, !=, >, or - <. 
For example, list private connections created this year - by specifying **createTime %gt; - 2021-01-01T00:00:00.000000000Z**. - order_by (str): - Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListPrivateConnectionsResponse(proto.Message): - r"""Response message for 'ListPrivateConnections' request. - - Attributes: - private_connections (MutableSequence[google.cloud.clouddms_v1.types.PrivateConnection]): - List of private connections. - next_page_token (str): - A token which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - private_connections: MutableSequence[clouddms_resources.PrivateConnection] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=clouddms_resources.PrivateConnection, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeletePrivateConnectionRequest(proto.Message): - r"""Request message to delete a private connection. - - Attributes: - name (str): - Required. The name of the private connection - to delete. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two requests with the same ID, then the - second request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetPrivateConnectionRequest(proto.Message): - r"""Request message to get a private connection resource. - - Attributes: - name (str): - Required. The name of the private connection - to get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListConversionWorkspacesRequest(proto.Message): - r"""Retrieve a list of all conversion workspaces in a given - project and location. - - Attributes: - parent (str): - Required. The parent which owns this - collection of conversion workspaces. - page_size (int): - The maximum number of conversion workspaces - to return. The service may return fewer than - this value. If unspecified, at most 50 sets are - returned. - page_token (str): - The nextPageToken value received in the - previous call to conversionWorkspaces.list, used - in the subsequent request to retrieve the next - page of results. On first call this should be - left blank. When paginating, all other - parameters provided to conversionWorkspaces.list - must match the call that provided the page - token. - filter (str): - A filter expression that filters conversion workspaces - listed in the response. The expression must specify the - field name, a comparison operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The comparison operator must be either - =, !=, >, or <. For example, list conversion workspaces - created this year by specifying **createTime %gt; - 2020-01-01T00:00:00.000000000Z.** You can also filter nested - fields. 
For example, you could specify **source.version = - "12.c.1"** to select all conversion workspaces with source - database version equal to 12.c.1. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListConversionWorkspacesResponse(proto.Message): - r"""Response message for 'ListConversionWorkspaces' request. - - Attributes: - conversion_workspaces (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): - The list of conversion workspace objects. - next_page_token (str): - A token which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - conversion_workspaces: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=conversionworkspace_resources.ConversionWorkspace, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetConversionWorkspaceRequest(proto.Message): - r"""Request message for 'GetConversionWorkspace' request. - - Attributes: - name (str): - Required. Name of the conversion workspace - resource to get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateConversionWorkspaceRequest(proto.Message): - r"""Request message to create a new Conversion Workspace - in the specified project and region. - - Attributes: - parent (str): - Required. The parent which owns this - collection of conversion workspaces. - conversion_workspace_id (str): - Required. The ID of the conversion workspace - to create. 
- conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): - Required. Represents a conversion workspace - object. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - conversion_workspace_id: str = proto.Field( - proto.STRING, - number=2, - ) - conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( - proto.MESSAGE, - number=3, - message=conversionworkspace_resources.ConversionWorkspace, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateConversionWorkspaceRequest(proto.Message): - r"""Request message for 'UpdateConversionWorkspace' request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the - fields to be overwritten by the update in the - conversion workspace resource. - conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): - Required. The conversion workspace parameters - to update. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. 
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - conversion_workspace: conversionworkspace_resources.ConversionWorkspace = proto.Field( - proto.MESSAGE, - number=2, - message=conversionworkspace_resources.ConversionWorkspace, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteConversionWorkspaceRequest(proto.Message): - r"""Request message for 'DeleteConversionWorkspace' request. - - Attributes: - name (str): - Required. Name of the conversion workspace - resource to delete. - request_id (str): - A unique ID used to identify the request. If the server - receives two requests with the same ID, then the second - request is ignored. - - It is recommended to always set this value to a UUID. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CommitConversionWorkspaceRequest(proto.Message): - r"""Request message for 'CommitConversionWorkspace' request. - - Attributes: - name (str): - Required. Name of the conversion workspace - resource to commit. - commit_name (str): - Optional. Optional name of the commit. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - commit_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RollbackConversionWorkspaceRequest(proto.Message): - r"""Request message for 'RollbackConversionWorkspace' request. - - Attributes: - name (str): - Required. Name of the conversion workspace - resource to roll back to. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ApplyConversionWorkspaceRequest(proto.Message): - r"""Request message for 'ApplyConversionWorkspace' request. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required. The name of the conversion workspace resource for - which to apply the draft tree. Must be in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - filter (str): - Filter which entities to apply. Leaving this - field empty will apply all of the entities. - Supports Google AIP 160 based filtering. - connection_profile (str): - Fully qualified (Uri) name of the destination - connection profile. - - This field is a member of `oneof`_ ``destination``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - connection_profile: str = proto.Field( - proto.STRING, - number=100, - oneof='destination', - ) - - -class SeedConversionWorkspaceRequest(proto.Message): - r"""Request message for 'SeedConversionWorkspace' request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Name of the conversion workspace resource to seed with new - database structure, in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - auto_commit (bool): - Should the conversion workspace be committed - automatically after the seed operation. - source_connection_profile (str): - Fully qualified (Uri) name of the source - connection profile. - - This field is a member of `oneof`_ ``seed_from``. - destination_connection_profile (str): - Fully qualified (Uri) name of the destination - connection profile. - - This field is a member of `oneof`_ ``seed_from``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - auto_commit: bool = proto.Field( - proto.BOOL, - number=2, - ) - source_connection_profile: str = proto.Field( - proto.STRING, - number=100, - oneof='seed_from', - ) - destination_connection_profile: str = proto.Field( - proto.STRING, - number=101, - oneof='seed_from', - ) - - -class ConvertConversionWorkspaceRequest(proto.Message): - r"""Request message for 'ConvertConversionWorkspace' request. - - Attributes: - name (str): - Name of the conversion workspace resource to convert in the - form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - auto_commit (bool): - Specifies whether the conversion workspace is - to be committed automatically after the - conversion. - filter (str): - Filter the entities to convert. Leaving this - field empty will convert all of the entities. - Supports Google AIP-160 style filtering. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - auto_commit: bool = proto.Field( - proto.BOOL, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ImportMappingRulesRequest(proto.Message): - r"""Request message for 'ImportMappingRules' request. - - Attributes: - parent (str): - Required. Name of the conversion workspace resource to - import the rules to in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - rules_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): - The format of the rules content file. - rules_files (MutableSequence[google.cloud.clouddms_v1.types.ImportMappingRulesRequest.RulesFile]): - One or more rules files. - auto_commit (bool): - Should the conversion workspace be committed - automatically after the import operation. - """ - - class RulesFile(proto.Message): - r"""Details of a single rules file. - - Attributes: - rules_source_filename (str): - The filename of the rules that needs to be - converted. 
The filename is used mainly so that - future logs of the import rules job contain it, - and can therefore be searched by it. - rules_content (str): - The text content of the rules that needs to - be converted. - """ - - rules_source_filename: str = proto.Field( - proto.STRING, - number=1, - ) - rules_content: str = proto.Field( - proto.STRING, - number=2, - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - rules_format: conversionworkspace_resources.ImportRulesFileFormat = proto.Field( - proto.ENUM, - number=2, - enum=conversionworkspace_resources.ImportRulesFileFormat, - ) - rules_files: MutableSequence[RulesFile] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=RulesFile, - ) - auto_commit: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class DescribeDatabaseEntitiesRequest(proto.Message): - r"""Request message for 'DescribeDatabaseEntities' request. - - Attributes: - conversion_workspace (str): - Required. Name of the conversion workspace resource whose - database entities are described. Must be in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - page_size (int): - The maximum number of entities to return. The - service may return fewer entities than the value - specifies. - page_token (str): - The nextPageToken value received in the - previous call to - conversionWorkspace.describeDatabaseEntities, - used in the subsequent request to retrieve the - next page of results. On first call this should - be left blank. When paginating, all other - parameters provided to - conversionWorkspace.describeDatabaseEntities - must match the call that provided the page - token. - tree (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest.DBTreeType): - The tree to fetch. - uncommitted (bool): - Whether to retrieve the latest committed version of the - entities or the latest version. This field is ignored if a - specific commit_id is specified. 
- commit_id (str): - Request a specific commit ID. If not - specified, the entities from the latest commit - are returned. - filter (str): - Filter the returned entities based on AIP-160 - standard. - """ - class DBTreeType(proto.Enum): - r"""The type of a tree to return - - Values: - DB_TREE_TYPE_UNSPECIFIED (0): - Unspecified tree type. - SOURCE_TREE (1): - The source database tree. - DRAFT_TREE (2): - The draft database tree. - DESTINATION_TREE (3): - The destination database tree. - """ - DB_TREE_TYPE_UNSPECIFIED = 0 - SOURCE_TREE = 1 - DRAFT_TREE = 2 - DESTINATION_TREE = 3 - - conversion_workspace: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - tree: DBTreeType = proto.Field( - proto.ENUM, - number=6, - enum=DBTreeType, - ) - uncommitted: bool = proto.Field( - proto.BOOL, - number=11, - ) - commit_id: str = proto.Field( - proto.STRING, - number=12, - ) - filter: str = proto.Field( - proto.STRING, - number=13, - ) - - -class DescribeDatabaseEntitiesResponse(proto.Message): - r"""Response message for 'DescribeDatabaseEntities' request. - - Attributes: - database_entities (MutableSequence[google.cloud.clouddms_v1.types.DatabaseEntity]): - The list of database entities for the - conversion workspace. - next_page_token (str): - A token which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - database_entities: MutableSequence[conversionworkspace_resources.DatabaseEntity] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=conversionworkspace_resources.DatabaseEntity, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SearchBackgroundJobsRequest(proto.Message): - r"""Request message for 'SearchBackgroundJobs' request. 
- - Attributes: - conversion_workspace (str): - Required. Name of the conversion workspace resource whose - jobs are listed, in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - return_most_recent_per_job_type (bool): - Optional. Whether or not to return just the - most recent job per job type, - max_size (int): - Optional. The maximum number of jobs to - return. The service may return fewer than this - value. If unspecified, at most 100 jobs are - returned. The maximum value is 100; values above - 100 are coerced to 100. - completed_until_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. If provided, only returns jobs that - completed until (not including) the given - timestamp. - """ - - conversion_workspace: str = proto.Field( - proto.STRING, - number=1, - ) - return_most_recent_per_job_type: bool = proto.Field( - proto.BOOL, - number=2, - ) - max_size: int = proto.Field( - proto.INT32, - number=3, - ) - completed_until_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class SearchBackgroundJobsResponse(proto.Message): - r"""Response message for 'SearchBackgroundJobs' request. - - Attributes: - jobs (MutableSequence[google.cloud.clouddms_v1.types.BackgroundJobLogEntry]): - The list of conversion workspace mapping - rules. - """ - - jobs: MutableSequence[conversionworkspace_resources.BackgroundJobLogEntry] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=conversionworkspace_resources.BackgroundJobLogEntry, - ) - - -class DescribeConversionWorkspaceRevisionsRequest(proto.Message): - r"""Request message for 'DescribeConversionWorkspaceRevisions' - request. - - Attributes: - conversion_workspace (str): - Required. Name of the conversion workspace resource whose - revisions are listed. Must be in the form of: - projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. - commit_id (str): - Optional. 
Optional filter to request a - specific commit ID. - """ - - conversion_workspace: str = proto.Field( - proto.STRING, - number=1, - ) - commit_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DescribeConversionWorkspaceRevisionsResponse(proto.Message): - r"""Response message for 'DescribeConversionWorkspaceRevisions' - request. - - Attributes: - revisions (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): - The list of conversion workspace revisions. - """ - - revisions: MutableSequence[conversionworkspace_resources.ConversionWorkspace] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=conversionworkspace_resources.ConversionWorkspace, - ) - - -class FetchStaticIpsRequest(proto.Message): - r"""Request message for 'FetchStaticIps' request. - - Attributes: - name (str): - Required. The resource name for the location for which - static IPs should be returned. Must be in the format - ``projects/*/locations/*``. - page_size (int): - Maximum number of IPs to return. - page_token (str): - A page token, received from a previous ``FetchStaticIps`` - call. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchStaticIpsResponse(proto.Message): - r"""Response message for a 'FetchStaticIps' request. - - Attributes: - static_ips (MutableSequence[str]): - List of static IPs. - next_page_token (str): - A token that can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. 
- """ - - @property - def raw_page(self): - return self - - static_ips: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py b/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py deleted file mode 100644 index f0bdff0..0000000 --- a/owl-bot-staging/v1/google/cloud/clouddms_v1/types/clouddms_resources.py +++ /dev/null @@ -1,2025 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.clouddms.v1', - manifest={ - 'NetworkArchitecture', - 'DatabaseEngine', - 'DatabaseProvider', - 'SslConfig', - 'MySqlConnectionProfile', - 'PostgreSqlConnectionProfile', - 'OracleConnectionProfile', - 'CloudSqlConnectionProfile', - 'AlloyDbConnectionProfile', - 'SqlAclEntry', - 'SqlIpConfig', - 'CloudSqlSettings', - 'AlloyDbSettings', - 'StaticIpConnectivity', - 'PrivateServiceConnectConnectivity', - 'ReverseSshConnectivity', - 'VpcPeeringConnectivity', - 'ForwardSshTunnelConnectivity', - 'StaticServiceIpConnectivity', - 'PrivateConnectivity', - 'DatabaseType', - 'MigrationJob', - 'ConversionWorkspaceInfo', - 'ConnectionProfile', - 'MigrationJobVerificationError', - 'PrivateConnection', - 'VpcPeeringConfig', - }, -) - - -class NetworkArchitecture(proto.Enum): - r""" - - Values: - NETWORK_ARCHITECTURE_UNSPECIFIED (0): - No description available. - NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER (1): - Instance is in Cloud SQL's old producer - network architecture. - NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER (2): - Instance is in Cloud SQL's new producer - network architecture. - """ - NETWORK_ARCHITECTURE_UNSPECIFIED = 0 - NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER = 1 - NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER = 2 - - -class DatabaseEngine(proto.Enum): - r"""The database engine types. - - Values: - DATABASE_ENGINE_UNSPECIFIED (0): - The source database engine of the migration - job is unknown. - MYSQL (1): - The source engine is MySQL. - POSTGRESQL (2): - The source engine is PostgreSQL. - ORACLE (4): - The source engine is Oracle. 
- """ - DATABASE_ENGINE_UNSPECIFIED = 0 - MYSQL = 1 - POSTGRESQL = 2 - ORACLE = 4 - - -class DatabaseProvider(proto.Enum): - r"""The database providers. - - Values: - DATABASE_PROVIDER_UNSPECIFIED (0): - The database provider is unknown. - CLOUDSQL (1): - CloudSQL runs the database. - RDS (2): - RDS runs the database. - AURORA (3): - Amazon Aurora. - ALLOYDB (4): - AlloyDB. - """ - DATABASE_PROVIDER_UNSPECIFIED = 0 - CLOUDSQL = 1 - RDS = 2 - AURORA = 3 - ALLOYDB = 4 - - -class SslConfig(proto.Message): - r"""SSL configuration information. - - Attributes: - type_ (google.cloud.clouddms_v1.types.SslConfig.SslType): - Output only. The ssl config type according to 'client_key', - 'client_certificate' and 'ca_certificate'. - client_key (str): - Input only. The unencrypted PKCS#1 or PKCS#8 PEM-encoded - private key associated with the Client Certificate. If this - field is used then the 'client_certificate' field is - mandatory. - client_certificate (str): - Input only. The x509 PEM-encoded certificate that will be - used by the replica to authenticate against the source - database server.If this field is used then the 'client_key' - field is mandatory. - ca_certificate (str): - Required. Input only. The x509 PEM-encoded - certificate of the CA that signed the source - database server's certificate. The replica will - use this certificate to verify it's connecting - to the right host. - """ - class SslType(proto.Enum): - r"""Specifies The kind of ssl configuration used. - - Values: - SSL_TYPE_UNSPECIFIED (0): - Unspecified. - SERVER_ONLY (1): - Only 'ca_certificate' specified. - SERVER_CLIENT (2): - Both server ('ca_certificate'), and client ('client_key', - 'client_certificate') specified. 
- """ - SSL_TYPE_UNSPECIFIED = 0 - SERVER_ONLY = 1 - SERVER_CLIENT = 2 - - type_: SslType = proto.Field( - proto.ENUM, - number=1, - enum=SslType, - ) - client_key: str = proto.Field( - proto.STRING, - number=2, - ) - client_certificate: str = proto.Field( - proto.STRING, - number=3, - ) - ca_certificate: str = proto.Field( - proto.STRING, - number=4, - ) - - -class MySqlConnectionProfile(proto.Message): - r"""Specifies connection parameters required specifically for - MySQL databases. - - Attributes: - host (str): - Required. The IP or hostname of the source - MySQL database. - port (int): - Required. The network port of the source - MySQL database. - username (str): - Required. The username that Database - Migration Service will use to connect to the - database. The value is encrypted when stored in - Database Migration Service. - password (str): - Required. Input only. The password for the - user that Database Migration Service will be - using to connect to the database. This field is - not returned on request, and the value is - encrypted when stored in Database Migration - Service. - password_set (bool): - Output only. Indicates If this connection - profile password is stored. - ssl (google.cloud.clouddms_v1.types.SslConfig): - SSL configuration for the destination to - connect to the source database. - cloud_sql_id (str): - If the source is a Cloud SQL database, use - this field to provide the Cloud SQL instance ID - of the source. 
- """ - - host: str = proto.Field( - proto.STRING, - number=1, - ) - port: int = proto.Field( - proto.INT32, - number=2, - ) - username: str = proto.Field( - proto.STRING, - number=3, - ) - password: str = proto.Field( - proto.STRING, - number=4, - ) - password_set: bool = proto.Field( - proto.BOOL, - number=5, - ) - ssl: 'SslConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='SslConfig', - ) - cloud_sql_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class PostgreSqlConnectionProfile(proto.Message): - r"""Specifies connection parameters required specifically for - PostgreSQL databases. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - host (str): - Required. The IP or hostname of the source - PostgreSQL database. - port (int): - Required. The network port of the source - PostgreSQL database. - username (str): - Required. The username that Database - Migration Service will use to connect to the - database. The value is encrypted when stored in - Database Migration Service. - password (str): - Required. Input only. The password for the - user that Database Migration Service will be - using to connect to the database. This field is - not returned on request, and the value is - encrypted when stored in Database Migration - Service. - password_set (bool): - Output only. Indicates If this connection - profile password is stored. - ssl (google.cloud.clouddms_v1.types.SslConfig): - SSL configuration for the destination to - connect to the source database. - cloud_sql_id (str): - If the source is a Cloud SQL database, use - this field to provide the Cloud SQL instance ID - of the source. 
- network_architecture (google.cloud.clouddms_v1.types.NetworkArchitecture): - Output only. If the source is a Cloud SQL - database, this field indicates the network - architecture it's associated with. - static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): - Static ip connectivity data (default, no - additional details needed). - - This field is a member of `oneof`_ ``connectivity``. - private_service_connect_connectivity (google.cloud.clouddms_v1.types.PrivateServiceConnectConnectivity): - Private service connect connectivity. - - This field is a member of `oneof`_ ``connectivity``. - """ - - host: str = proto.Field( - proto.STRING, - number=1, - ) - port: int = proto.Field( - proto.INT32, - number=2, - ) - username: str = proto.Field( - proto.STRING, - number=3, - ) - password: str = proto.Field( - proto.STRING, - number=4, - ) - password_set: bool = proto.Field( - proto.BOOL, - number=5, - ) - ssl: 'SslConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='SslConfig', - ) - cloud_sql_id: str = proto.Field( - proto.STRING, - number=7, - ) - network_architecture: 'NetworkArchitecture' = proto.Field( - proto.ENUM, - number=8, - enum='NetworkArchitecture', - ) - static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( - proto.MESSAGE, - number=100, - oneof='connectivity', - message='StaticIpConnectivity', - ) - private_service_connect_connectivity: 'PrivateServiceConnectConnectivity' = proto.Field( - proto.MESSAGE, - number=101, - oneof='connectivity', - message='PrivateServiceConnectConnectivity', - ) - - -class OracleConnectionProfile(proto.Message): - r"""Specifies connection parameters required specifically for - Oracle databases. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - host (str): - Required. The IP or hostname of the source - Oracle database. - port (int): - Required. The network port of the source - Oracle database. - username (str): - Required. The username that Database - Migration Service will use to connect to the - database. The value is encrypted when stored in - Database Migration Service. - password (str): - Required. Input only. The password for the - user that Database Migration Service will be - using to connect to the database. This field is - not returned on request, and the value is - encrypted when stored in Database Migration - Service. - password_set (bool): - Output only. Indicates whether a new password - is included in the request. - database_service (str): - Required. Database service for the Oracle - connection. - static_service_ip_connectivity (google.cloud.clouddms_v1.types.StaticServiceIpConnectivity): - Static Service IP connectivity. - - This field is a member of `oneof`_ ``connectivity``. - forward_ssh_connectivity (google.cloud.clouddms_v1.types.ForwardSshTunnelConnectivity): - Forward SSH tunnel connectivity. - - This field is a member of `oneof`_ ``connectivity``. - private_connectivity (google.cloud.clouddms_v1.types.PrivateConnectivity): - Private connectivity. - - This field is a member of `oneof`_ ``connectivity``. 
- """ - - host: str = proto.Field( - proto.STRING, - number=1, - ) - port: int = proto.Field( - proto.INT32, - number=2, - ) - username: str = proto.Field( - proto.STRING, - number=3, - ) - password: str = proto.Field( - proto.STRING, - number=4, - ) - password_set: bool = proto.Field( - proto.BOOL, - number=5, - ) - database_service: str = proto.Field( - proto.STRING, - number=6, - ) - static_service_ip_connectivity: 'StaticServiceIpConnectivity' = proto.Field( - proto.MESSAGE, - number=100, - oneof='connectivity', - message='StaticServiceIpConnectivity', - ) - forward_ssh_connectivity: 'ForwardSshTunnelConnectivity' = proto.Field( - proto.MESSAGE, - number=101, - oneof='connectivity', - message='ForwardSshTunnelConnectivity', - ) - private_connectivity: 'PrivateConnectivity' = proto.Field( - proto.MESSAGE, - number=102, - oneof='connectivity', - message='PrivateConnectivity', - ) - - -class CloudSqlConnectionProfile(proto.Message): - r"""Specifies required connection parameters, and, optionally, - the parameters required to create a Cloud SQL destination - database instance. - - Attributes: - cloud_sql_id (str): - Output only. The Cloud SQL instance ID that - this connection profile is associated with. - settings (google.cloud.clouddms_v1.types.CloudSqlSettings): - Immutable. Metadata used to create the - destination Cloud SQL database. - private_ip (str): - Output only. The Cloud SQL database - instance's private IP. - public_ip (str): - Output only. The Cloud SQL database - instance's public IP. - additional_public_ip (str): - Output only. The Cloud SQL database - instance's additional (outgoing) public IP. Used - when the Cloud SQL database availability type is - REGIONAL (i.e. multiple zones / highly - available). 
- """ - - cloud_sql_id: str = proto.Field( - proto.STRING, - number=1, - ) - settings: 'CloudSqlSettings' = proto.Field( - proto.MESSAGE, - number=2, - message='CloudSqlSettings', - ) - private_ip: str = proto.Field( - proto.STRING, - number=3, - ) - public_ip: str = proto.Field( - proto.STRING, - number=4, - ) - additional_public_ip: str = proto.Field( - proto.STRING, - number=5, - ) - - -class AlloyDbConnectionProfile(proto.Message): - r"""Specifies required connection parameters, and the parameters - required to create an AlloyDB destination cluster. - - Attributes: - cluster_id (str): - Required. The AlloyDB cluster ID that this - connection profile is associated with. - settings (google.cloud.clouddms_v1.types.AlloyDbSettings): - Immutable. Metadata used to create the - destination AlloyDB cluster. - """ - - cluster_id: str = proto.Field( - proto.STRING, - number=1, - ) - settings: 'AlloyDbSettings' = proto.Field( - proto.MESSAGE, - number=2, - message='AlloyDbSettings', - ) - - -class SqlAclEntry(proto.Message): - r"""An entry for an Access Control list. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (str): - The allowlisted value for the access control - list. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - The time when this access control entry expires in `RFC - 3339 `__ format, for - example: ``2012-11-15T16:19:00.094Z``. - - This field is a member of `oneof`_ ``expiration``. - ttl (google.protobuf.duration_pb2.Duration): - Input only. The time-to-leave of this access - control entry. - - This field is a member of `oneof`_ ``expiration``. - label (str): - A label to identify this entry. 
- """ - - value: str = proto.Field( - proto.STRING, - number=1, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof='expiration', - message=timestamp_pb2.Timestamp, - ) - ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=11, - oneof='expiration', - message=duration_pb2.Duration, - ) - label: str = proto.Field( - proto.STRING, - number=3, - ) - - -class SqlIpConfig(proto.Message): - r"""IP Management configuration. - - Attributes: - enable_ipv4 (google.protobuf.wrappers_pb2.BoolValue): - Whether the instance should be assigned an - IPv4 address or not. - private_network (str): - The resource link for the VPC network from which the Cloud - SQL instance is accessible for private IP. For example, - ``projects/myProject/global/networks/default``. This setting - can be updated, but it cannot be removed after it is set. - allocated_ip_range (str): - Optional. The name of the allocated IP - address range for the private IP Cloud SQL - instance. This name refers to an already - allocated IP range address. If set, the instance - IP address will be created in the allocated - range. Note that this IP address range can't be - modified after the instance is created. If you - change the VPC when configuring connectivity - settings for the migration job, this field is - not relevant. - require_ssl (google.protobuf.wrappers_pb2.BoolValue): - Whether SSL connections over IP should be - enforced or not. - authorized_networks (MutableSequence[google.cloud.clouddms_v1.types.SqlAclEntry]): - The list of external networks that are allowed to connect to - the instance using the IP. See - https://en.wikipedia.org/wiki/CIDR_notation#CIDR_notation, - also known as 'slash' notation (e.g. ``192.168.100.0/24``). 
- """ - - enable_ipv4: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.BoolValue, - ) - private_network: str = proto.Field( - proto.STRING, - number=2, - ) - allocated_ip_range: str = proto.Field( - proto.STRING, - number=5, - ) - require_ssl: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers_pb2.BoolValue, - ) - authorized_networks: MutableSequence['SqlAclEntry'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='SqlAclEntry', - ) - - -class CloudSqlSettings(proto.Message): - r"""Settings for creating a Cloud SQL database instance. - - Attributes: - database_version (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDatabaseVersion): - The database engine type and version. - user_labels (MutableMapping[str, str]): - The resource labels for a Cloud SQL instance to use to - annotate any related underlying resources such as Compute - Engine VMs. An object containing a list of "key": "value" - pairs. - - Example: - ``{ "name": "wrench", "mass": "18kg", "count": "3" }``. - tier (str): - The tier (or machine type) for this instance, for example: - ``db-n1-standard-1`` (MySQL instances) or - ``db-custom-1-3840`` (PostgreSQL instances). For more - information, see `Cloud SQL Instance - Settings `__. - storage_auto_resize_limit (google.protobuf.wrappers_pb2.Int64Value): - The maximum size to which storage capacity - can be automatically increased. The default - value is 0, which specifies that there is no - limit. - activation_policy (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlActivationPolicy): - The activation policy specifies when the instance is - activated; it is applicable only when the instance state is - 'RUNNABLE'. Valid values: - - 'ALWAYS': The instance is on, and remains so even in the - absence of connection requests. - - ``NEVER``: The instance is off; it is not activated, even if - a connection request arrives. 
- ip_config (google.cloud.clouddms_v1.types.SqlIpConfig): - The settings for IP Management. This allows - to enable or disable the instance IP and manage - which external networks can connect to the - instance. The IPv4 address cannot be disabled. - auto_storage_increase (google.protobuf.wrappers_pb2.BoolValue): - [default: ON] If you enable this setting, Cloud SQL checks - your available storage every 30 seconds. If the available - storage falls below a threshold size, Cloud SQL - automatically adds additional storage capacity. If the - available storage repeatedly falls below the threshold size, - Cloud SQL continues to add storage until it reaches the - maximum of 30 TB. - database_flags (MutableMapping[str, str]): - The database flags passed to the Cloud SQL - instance at startup. An object containing a list - of "key": value pairs. Example: { "name": - "wrench", "mass": "1.3kg", "count": "3" }. - data_disk_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlDataDiskType): - The type of storage: ``PD_SSD`` (default) or ``PD_HDD``. - data_disk_size_gb (google.protobuf.wrappers_pb2.Int64Value): - The storage capacity available to the - database, in GB. The minimum (and default) size - is 10GB. - zone (str): - The Google Cloud Platform zone where your - Cloud SQL database instance is located. - secondary_zone (str): - Optional. The Google Cloud Platform zone - where the failover Cloud SQL database instance - is located. Used when the Cloud SQL database - availability type is REGIONAL (i.e. multiple - zones / highly available). - source_id (str): - The Database Migration Service source connection profile ID, - in the format: - ``projects/my_project_name/locations/us-central1/connectionProfiles/connection_profile_ID`` - root_password (str): - Input only. Initial root password. - root_password_set (bool): - Output only. Indicates If this connection - profile root password is stored. - collation (str): - The Cloud SQL default instance level - collation. 
- cmek_key_name (str): - The KMS key name used for the csql instance. - availability_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlAvailabilityType): - Optional. Availability type. Potential values: - - - ``ZONAL``: The instance serves data from only one zone. - Outages in that zone affect data availability. - - ``REGIONAL``: The instance can serve data from more than - one zone in a region (it is highly available). - """ - class SqlActivationPolicy(proto.Enum): - r"""Specifies when the instance should be activated. - - Values: - SQL_ACTIVATION_POLICY_UNSPECIFIED (0): - unspecified policy. - ALWAYS (1): - The instance is always up and running. - NEVER (2): - The instance should never spin up. - """ - SQL_ACTIVATION_POLICY_UNSPECIFIED = 0 - ALWAYS = 1 - NEVER = 2 - - class SqlDataDiskType(proto.Enum): - r"""The storage options for Cloud SQL databases. - - Values: - SQL_DATA_DISK_TYPE_UNSPECIFIED (0): - Unspecified. - PD_SSD (1): - SSD disk. - PD_HDD (2): - HDD disk. - """ - SQL_DATA_DISK_TYPE_UNSPECIFIED = 0 - PD_SSD = 1 - PD_HDD = 2 - - class SqlDatabaseVersion(proto.Enum): - r"""The database engine type and version. - - Values: - SQL_DATABASE_VERSION_UNSPECIFIED (0): - Unspecified version. - MYSQL_5_6 (1): - MySQL 5.6. - MYSQL_5_7 (2): - MySQL 5.7. - POSTGRES_9_6 (3): - PostgreSQL 9.6. - POSTGRES_11 (4): - PostgreSQL 11. - POSTGRES_10 (5): - PostgreSQL 10. - MYSQL_8_0 (6): - MySQL 8.0. - POSTGRES_12 (7): - PostgreSQL 12. - POSTGRES_13 (8): - PostgreSQL 13. - POSTGRES_14 (17): - PostgreSQL 14. - """ - SQL_DATABASE_VERSION_UNSPECIFIED = 0 - MYSQL_5_6 = 1 - MYSQL_5_7 = 2 - POSTGRES_9_6 = 3 - POSTGRES_11 = 4 - POSTGRES_10 = 5 - MYSQL_8_0 = 6 - POSTGRES_12 = 7 - POSTGRES_13 = 8 - POSTGRES_14 = 17 - - class SqlAvailabilityType(proto.Enum): - r"""The availability type of the given Cloud SQL instance. - - Values: - SQL_AVAILABILITY_TYPE_UNSPECIFIED (0): - This is an unknown Availability type. - ZONAL (1): - Zonal availablility instance. 
- REGIONAL (2): - Regional availability instance. - """ - SQL_AVAILABILITY_TYPE_UNSPECIFIED = 0 - ZONAL = 1 - REGIONAL = 2 - - database_version: SqlDatabaseVersion = proto.Field( - proto.ENUM, - number=1, - enum=SqlDatabaseVersion, - ) - user_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - tier: str = proto.Field( - proto.STRING, - number=3, - ) - storage_auto_resize_limit: wrappers_pb2.Int64Value = proto.Field( - proto.MESSAGE, - number=4, - message=wrappers_pb2.Int64Value, - ) - activation_policy: SqlActivationPolicy = proto.Field( - proto.ENUM, - number=5, - enum=SqlActivationPolicy, - ) - ip_config: 'SqlIpConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='SqlIpConfig', - ) - auto_storage_increase: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=7, - message=wrappers_pb2.BoolValue, - ) - database_flags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - data_disk_type: SqlDataDiskType = proto.Field( - proto.ENUM, - number=9, - enum=SqlDataDiskType, - ) - data_disk_size_gb: wrappers_pb2.Int64Value = proto.Field( - proto.MESSAGE, - number=10, - message=wrappers_pb2.Int64Value, - ) - zone: str = proto.Field( - proto.STRING, - number=11, - ) - secondary_zone: str = proto.Field( - proto.STRING, - number=18, - ) - source_id: str = proto.Field( - proto.STRING, - number=12, - ) - root_password: str = proto.Field( - proto.STRING, - number=13, - ) - root_password_set: bool = proto.Field( - proto.BOOL, - number=14, - ) - collation: str = proto.Field( - proto.STRING, - number=15, - ) - cmek_key_name: str = proto.Field( - proto.STRING, - number=16, - ) - availability_type: SqlAvailabilityType = proto.Field( - proto.ENUM, - number=17, - enum=SqlAvailabilityType, - ) - - -class AlloyDbSettings(proto.Message): - r"""Settings for creating an AlloyDB cluster. 
- - Attributes: - initial_user (google.cloud.clouddms_v1.types.AlloyDbSettings.UserPassword): - Required. Input only. Initial user to setup - during cluster creation. Required. - vpc_network (str): - Required. The resource link for the VPC network in which - cluster resources are created and from which they are - accessible via Private IP. The network must belong to the - same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". - This is required to create a cluster. - labels (MutableMapping[str, str]): - Labels for the AlloyDB cluster created by - DMS. An object containing a list of 'key', - 'value' pairs. - primary_instance_settings (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings): - - encryption_config (google.cloud.clouddms_v1.types.AlloyDbSettings.EncryptionConfig): - Optional. The encryption config can be - specified to encrypt the data disks and other - persistent data resources of a cluster with a - customer-managed encryption key (CMEK). When - this field is not specified, the cluster will - then use default encryption scheme to protect - the user data. - """ - - class UserPassword(proto.Message): - r"""The username/password for a database user. Used for - specifying initial users at cluster creation time. - - Attributes: - user (str): - The database username. - password (str): - The initial password for the user. - password_set (bool): - Output only. Indicates if the initial_user.password field - has been set. - """ - - user: str = proto.Field( - proto.STRING, - number=1, - ) - password: str = proto.Field( - proto.STRING, - number=2, - ) - password_set: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class PrimaryInstanceSettings(proto.Message): - r"""Settings for the cluster's primary instance - - Attributes: - id (str): - Required. The ID of the AlloyDB primary instance. The ID - must satisfy the regex expression "[a-z0-9-]+". 
- machine_config (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig): - Configuration for the machines that host the - underlying database engine. - database_flags (MutableMapping[str, str]): - Database flags to pass to AlloyDB when DMS is - creating the AlloyDB cluster and instances. See - the AlloyDB documentation for how these can be - used. - labels (MutableMapping[str, str]): - Labels for the AlloyDB primary instance - created by DMS. An object containing a list of - 'key', 'value' pairs. - private_ip (str): - Output only. The private IP address for the - Instance. This is the connection endpoint for an - end-user application. - """ - - class MachineConfig(proto.Message): - r"""MachineConfig describes the configuration of a machine. - - Attributes: - cpu_count (int): - The number of CPU's in the VM instance. - """ - - cpu_count: int = proto.Field( - proto.INT32, - number=1, - ) - - id: str = proto.Field( - proto.STRING, - number=1, - ) - machine_config: 'AlloyDbSettings.PrimaryInstanceSettings.MachineConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='AlloyDbSettings.PrimaryInstanceSettings.MachineConfig', - ) - database_flags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - private_ip: str = proto.Field( - proto.STRING, - number=8, - ) - - class EncryptionConfig(proto.Message): - r"""EncryptionConfig describes the encryption config of a cluster - that is encrypted with a CMEK (customer-managed encryption key). - - Attributes: - kms_key_name (str): - The fully-qualified resource name of the KMS key. 
Each Cloud - KMS key is regionalized and has the following format: - projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - - initial_user: UserPassword = proto.Field( - proto.MESSAGE, - number=1, - message=UserPassword, - ) - vpc_network: str = proto.Field( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - primary_instance_settings: PrimaryInstanceSettings = proto.Field( - proto.MESSAGE, - number=4, - message=PrimaryInstanceSettings, - ) - encryption_config: EncryptionConfig = proto.Field( - proto.MESSAGE, - number=5, - message=EncryptionConfig, - ) - - -class StaticIpConnectivity(proto.Message): - r"""The source database will allow incoming connections from the - public IP of the destination database. You can retrieve the - public IP of the Cloud SQL instance from the Cloud SQL console - or using Cloud SQL APIs. No additional configuration is - required. - - """ - - -class PrivateServiceConnectConnectivity(proto.Message): - r"""Private Service Connect connectivity - (https://cloud.google.com/vpc/docs/private-service-connect#service-attachments) - - Attributes: - service_attachment (str): - Required. A service attachment that exposes a database, and - has the following format: - projects/{project}/regions/{region}/serviceAttachments/{service_attachment_name} - """ - - service_attachment: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ReverseSshConnectivity(proto.Message): - r"""The details needed to configure a reverse SSH tunnel between - the source and destination databases. 
These details will be used - when calling the generateSshScript method (see - https://cloud.google.com/database-migration/docs/reference/rest/v1/projects.locations.migrationJobs/generateSshScript) - to produce the script that will help set up the reverse SSH - tunnel, and to set up the VPC peering between the Cloud SQL - private network and the VPC. - - Attributes: - vm_ip (str): - Required. The IP of the virtual machine - (Compute Engine) used as the bastion server for - the SSH tunnel. - vm_port (int): - Required. The forwarding port of the virtual - machine (Compute Engine) used as the bastion - server for the SSH tunnel. - vm (str): - The name of the virtual machine (Compute - Engine) used as the bastion server for the SSH - tunnel. - vpc (str): - The name of the VPC to peer with the Cloud - SQL private network. - """ - - vm_ip: str = proto.Field( - proto.STRING, - number=1, - ) - vm_port: int = proto.Field( - proto.INT32, - number=2, - ) - vm: str = proto.Field( - proto.STRING, - number=3, - ) - vpc: str = proto.Field( - proto.STRING, - number=4, - ) - - -class VpcPeeringConnectivity(proto.Message): - r"""The details of the VPC where the source database is located - in Google Cloud. We will use this information to set up the VPC - peering connection between Cloud SQL and this VPC. - - Attributes: - vpc (str): - The name of the VPC network to peer with the - Cloud SQL private network. - """ - - vpc: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ForwardSshTunnelConnectivity(proto.Message): - r"""Forward SSH Tunnel connectivity. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hostname (str): - Required. Hostname for the SSH tunnel. 
- username (str): - Required. Username for the SSH tunnel. - port (int): - Port for the SSH tunnel, default value is 22. - password (str): - Input only. SSH password. - - This field is a member of `oneof`_ ``authentication_method``. - private_key (str): - Input only. SSH private key. - - This field is a member of `oneof`_ ``authentication_method``. - """ - - hostname: str = proto.Field( - proto.STRING, - number=1, - ) - username: str = proto.Field( - proto.STRING, - number=2, - ) - port: int = proto.Field( - proto.INT32, - number=3, - ) - password: str = proto.Field( - proto.STRING, - number=100, - oneof='authentication_method', - ) - private_key: str = proto.Field( - proto.STRING, - number=101, - oneof='authentication_method', - ) - - -class StaticServiceIpConnectivity(proto.Message): - r"""Static IP address connectivity configured on service project. - """ - - -class PrivateConnectivity(proto.Message): - r"""Private Connectivity. - - Attributes: - private_connection (str): - Required. The resource name (URI) of the - private connection. - """ - - private_connection: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DatabaseType(proto.Message): - r"""A message defining the database engine and provider. - - Attributes: - provider (google.cloud.clouddms_v1.types.DatabaseProvider): - The database provider. - engine (google.cloud.clouddms_v1.types.DatabaseEngine): - The database engine. - """ - - provider: 'DatabaseProvider' = proto.Field( - proto.ENUM, - number=1, - enum='DatabaseProvider', - ) - engine: 'DatabaseEngine' = proto.Field( - proto.ENUM, - number=2, - enum='DatabaseEngine', - ) - - -class MigrationJob(proto.Message): - r"""Represents a Database Migration Service migration job object. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The name (URI) of this migration job - resource, in the form of: - projects/{project}/locations/{location}/migrationJobs/{migrationJob}. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the migration - job resource was created. A timestamp in RFC3339 - UTC "Zulu" format, accurate to nanoseconds. - Example: "2014-10-02T15:01:23.045123456Z". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the migration - job resource was last updated. A timestamp in - RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: - "2014-10-02T15:01:23.045123456Z". - labels (MutableMapping[str, str]): - The resource labels for migration job to use to annotate any - related underlying resources such as Compute Engine VMs. An - object containing a list of "key": "value" pairs. - - Example: - ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. - display_name (str): - The migration job display name. - state (google.cloud.clouddms_v1.types.MigrationJob.State): - The current migration job state. - phase (google.cloud.clouddms_v1.types.MigrationJob.Phase): - Output only. The current migration job phase. - type_ (google.cloud.clouddms_v1.types.MigrationJob.Type): - Required. The migration job type. - dump_path (str): - The path to the dump file in Google Cloud Storage, in the - format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). This field and - the "dump_flags" field are mutually exclusive. - dump_flags (google.cloud.clouddms_v1.types.MigrationJob.DumpFlags): - The initial dump flags. This field and the "dump_path" field - are mutually exclusive. - source (str): - Required. The resource name (URI) of the - source connection profile. - destination (str): - Required. The resource name (URI) of the - destination connection profile. 
- reverse_ssh_connectivity (google.cloud.clouddms_v1.types.ReverseSshConnectivity): - The details needed to communicate to the - source over Reverse SSH tunnel connectivity. - - This field is a member of `oneof`_ ``connectivity``. - vpc_peering_connectivity (google.cloud.clouddms_v1.types.VpcPeeringConnectivity): - The details of the VPC network that the - source database is located in. - - This field is a member of `oneof`_ ``connectivity``. - static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): - static ip connectivity data (default, no - additional details needed). - - This field is a member of `oneof`_ ``connectivity``. - duration (google.protobuf.duration_pb2.Duration): - Output only. The duration of the migration - job (in seconds). A duration in seconds with up - to nine fractional digits, terminated by 's'. - Example: "3.5s". - error (google.rpc.status_pb2.Status): - Output only. The error details in case of - state FAILED. - source_database (google.cloud.clouddms_v1.types.DatabaseType): - The database engine type and provider of the - source. - destination_database (google.cloud.clouddms_v1.types.DatabaseType): - The database engine type and provider of the - destination. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. If the migration job is - completed, the time when it was completed. - conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspaceInfo): - The conversion workspace used by the - migration. - filter (str): - This field can be used to select the entities - to migrate as part of the migration job. It uses - AIP-160 notation to select a subset of the - entities configured on the associated - conversion-workspace. This field should not be - set on migration-jobs that are not associated - with a conversion workspace. - cmek_key_name (str): - The CMEK (customer-managed encryption key) fully qualified - key name used for the migration job. 
This field supports all - migration jobs types except for: - - - Mysql to Mysql (use the cmek field in the cloudsql - connection profile instead). - - PostrgeSQL to PostgreSQL (use the cmek field in the - cloudsql connection profile instead). - - PostgreSQL to AlloyDB (use the kms_key_name field in the - alloydb connection profile instead). Each Cloud CMEK key - has the following format: - projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] - """ - class State(proto.Enum): - r"""The current migration job states. - - Values: - STATE_UNSPECIFIED (0): - The state of the migration job is unknown. - MAINTENANCE (1): - The migration job is down for maintenance. - DRAFT (2): - The migration job is in draft mode and no - resources are created. - CREATING (3): - The migration job is being created. - NOT_STARTED (4): - The migration job is created and not started. - RUNNING (5): - The migration job is running. - FAILED (6): - The migration job failed. - COMPLETED (7): - The migration job has been completed. - DELETING (8): - The migration job is being deleted. - STOPPING (9): - The migration job is being stopped. - STOPPED (10): - The migration job is currently stopped. - DELETED (11): - The migration job has been deleted. - UPDATING (12): - The migration job is being updated. - STARTING (13): - The migration job is starting. - RESTARTING (14): - The migration job is restarting. - RESUMING (15): - The migration job is resuming. - """ - STATE_UNSPECIFIED = 0 - MAINTENANCE = 1 - DRAFT = 2 - CREATING = 3 - NOT_STARTED = 4 - RUNNING = 5 - FAILED = 6 - COMPLETED = 7 - DELETING = 8 - STOPPING = 9 - STOPPED = 10 - DELETED = 11 - UPDATING = 12 - STARTING = 13 - RESTARTING = 14 - RESUMING = 15 - - class Phase(proto.Enum): - r"""The current migration job phase. - - Values: - PHASE_UNSPECIFIED (0): - The phase of the migration job is unknown. - FULL_DUMP (1): - The migration job is in the full dump phase. - CDC (2): - The migration job is CDC phase. 
- PROMOTE_IN_PROGRESS (3): - The migration job is running the promote - phase. - WAITING_FOR_SOURCE_WRITES_TO_STOP (4): - Only RDS flow - waiting for source writes to - stop - PREPARING_THE_DUMP (5): - Only RDS flow - the sources writes stopped, - waiting for dump to begin - """ - PHASE_UNSPECIFIED = 0 - FULL_DUMP = 1 - CDC = 2 - PROMOTE_IN_PROGRESS = 3 - WAITING_FOR_SOURCE_WRITES_TO_STOP = 4 - PREPARING_THE_DUMP = 5 - - class Type(proto.Enum): - r"""The type of migration job (one-time or continuous). - - Values: - TYPE_UNSPECIFIED (0): - The type of the migration job is unknown. - ONE_TIME (1): - The migration job is a one time migration. - CONTINUOUS (2): - The migration job is a continuous migration. - """ - TYPE_UNSPECIFIED = 0 - ONE_TIME = 1 - CONTINUOUS = 2 - - class DumpFlag(proto.Message): - r"""Dump flag definition. - - Attributes: - name (str): - The name of the flag - value (str): - The value of the flag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - value: str = proto.Field( - proto.STRING, - number=2, - ) - - class DumpFlags(proto.Message): - r"""Dump flags definition. - - Attributes: - dump_flags (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob.DumpFlag]): - The flags for the initial dump. 
- """ - - dump_flags: MutableSequence['MigrationJob.DumpFlag'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='MigrationJob.DumpFlag', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - phase: Phase = proto.Field( - proto.ENUM, - number=7, - enum=Phase, - ) - type_: Type = proto.Field( - proto.ENUM, - number=8, - enum=Type, - ) - dump_path: str = proto.Field( - proto.STRING, - number=9, - ) - dump_flags: DumpFlags = proto.Field( - proto.MESSAGE, - number=17, - message=DumpFlags, - ) - source: str = proto.Field( - proto.STRING, - number=10, - ) - destination: str = proto.Field( - proto.STRING, - number=11, - ) - reverse_ssh_connectivity: 'ReverseSshConnectivity' = proto.Field( - proto.MESSAGE, - number=101, - oneof='connectivity', - message='ReverseSshConnectivity', - ) - vpc_peering_connectivity: 'VpcPeeringConnectivity' = proto.Field( - proto.MESSAGE, - number=102, - oneof='connectivity', - message='VpcPeeringConnectivity', - ) - static_ip_connectivity: 'StaticIpConnectivity' = proto.Field( - proto.MESSAGE, - number=103, - oneof='connectivity', - message='StaticIpConnectivity', - ) - duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=12, - message=duration_pb2.Duration, - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=13, - message=status_pb2.Status, - ) - source_database: 'DatabaseType' = proto.Field( - proto.MESSAGE, - number=14, - message='DatabaseType', - ) - destination_database: 'DatabaseType' = 
proto.Field( - proto.MESSAGE, - number=15, - message='DatabaseType', - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - conversion_workspace: 'ConversionWorkspaceInfo' = proto.Field( - proto.MESSAGE, - number=18, - message='ConversionWorkspaceInfo', - ) - filter: str = proto.Field( - proto.STRING, - number=20, - ) - cmek_key_name: str = proto.Field( - proto.STRING, - number=21, - ) - - -class ConversionWorkspaceInfo(proto.Message): - r"""A conversion workspace's version. - - Attributes: - name (str): - The resource name (URI) of the conversion - workspace. - commit_id (str): - The commit ID of the conversion workspace. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - commit_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ConnectionProfile(proto.Message): - r"""A connection profile definition. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The name of this connection profile resource - in the form of - projects/{project}/locations/{location}/connectionProfiles/{connectionProfile}. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the resource - was created. A timestamp in RFC3339 UTC "Zulu" - format, accurate to nanoseconds. Example: - "2014-10-02T15:01:23.045123456Z". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the resource - was last updated. A timestamp in RFC3339 UTC - "Zulu" format, accurate to nanoseconds. Example: - "2014-10-02T15:01:23.045123456Z". 
- labels (MutableMapping[str, str]): - The resource labels for connection profile to use to - annotate any related underlying resources such as Compute - Engine VMs. An object containing a list of "key": "value" - pairs. - - Example: - ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. - state (google.cloud.clouddms_v1.types.ConnectionProfile.State): - The current connection profile state (e.g. - DRAFT, READY, or FAILED). - display_name (str): - The connection profile display name. - mysql (google.cloud.clouddms_v1.types.MySqlConnectionProfile): - A MySQL database connection profile. - - This field is a member of `oneof`_ ``connection_profile``. - postgresql (google.cloud.clouddms_v1.types.PostgreSqlConnectionProfile): - A PostgreSQL database connection profile. - - This field is a member of `oneof`_ ``connection_profile``. - oracle (google.cloud.clouddms_v1.types.OracleConnectionProfile): - An Oracle database connection profile. - - This field is a member of `oneof`_ ``connection_profile``. - cloudsql (google.cloud.clouddms_v1.types.CloudSqlConnectionProfile): - A CloudSQL database connection profile. - - This field is a member of `oneof`_ ``connection_profile``. - alloydb (google.cloud.clouddms_v1.types.AlloyDbConnectionProfile): - An AlloyDB cluster connection profile. - - This field is a member of `oneof`_ ``connection_profile``. - error (google.rpc.status_pb2.Status): - Output only. The error details in case of - state FAILED. - provider (google.cloud.clouddms_v1.types.DatabaseProvider): - The database provider. - """ - class State(proto.Enum): - r"""The current connection profile state (e.g. DRAFT, READY, or - FAILED). - - Values: - STATE_UNSPECIFIED (0): - The state of the connection profile is - unknown. - DRAFT (1): - The connection profile is in draft mode and - fully editable. - CREATING (2): - The connection profile is being created. - READY (3): - The connection profile is ready. - UPDATING (4): - The connection profile is being updated. 
- DELETING (5): - The connection profile is being deleted. - DELETED (6): - The connection profile has been deleted. - FAILED (7): - The last action on the connection profile - failed. - """ - STATE_UNSPECIFIED = 0 - DRAFT = 1 - CREATING = 2 - READY = 3 - UPDATING = 4 - DELETING = 5 - DELETED = 6 - FAILED = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - mysql: 'MySqlConnectionProfile' = proto.Field( - proto.MESSAGE, - number=100, - oneof='connection_profile', - message='MySqlConnectionProfile', - ) - postgresql: 'PostgreSqlConnectionProfile' = proto.Field( - proto.MESSAGE, - number=101, - oneof='connection_profile', - message='PostgreSqlConnectionProfile', - ) - oracle: 'OracleConnectionProfile' = proto.Field( - proto.MESSAGE, - number=104, - oneof='connection_profile', - message='OracleConnectionProfile', - ) - cloudsql: 'CloudSqlConnectionProfile' = proto.Field( - proto.MESSAGE, - number=102, - oneof='connection_profile', - message='CloudSqlConnectionProfile', - ) - alloydb: 'AlloyDbConnectionProfile' = proto.Field( - proto.MESSAGE, - number=105, - oneof='connection_profile', - message='AlloyDbConnectionProfile', - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=7, - message=status_pb2.Status, - ) - provider: 'DatabaseProvider' = proto.Field( - proto.ENUM, - number=8, - enum='DatabaseProvider', - ) - - -class MigrationJobVerificationError(proto.Message): - r"""Error message of a verification Migration job. 
- - Attributes: - error_code (google.cloud.clouddms_v1.types.MigrationJobVerificationError.ErrorCode): - Output only. An instance of ErrorCode - specifying the error that occurred. - error_message (str): - Output only. A formatted message with further - details about the error and a CTA. - error_detail_message (str): - Output only. A specific detailed error - message, if supplied by the engine. - """ - class ErrorCode(proto.Enum): - r"""A general error code describing the type of error that - occurred. - - Values: - ERROR_CODE_UNSPECIFIED (0): - An unknown error occurred - CONNECTION_FAILURE (1): - We failed to connect to one of the connection - profile. - AUTHENTICATION_FAILURE (2): - We failed to authenticate to one of the - connection profile. - INVALID_CONNECTION_PROFILE_CONFIG (3): - One of the involved connection profiles has - an invalid configuration. - VERSION_INCOMPATIBILITY (4): - The versions of the source and the - destination are incompatible. - CONNECTION_PROFILE_TYPES_INCOMPATIBILITY (5): - The types of the source and the destination - are incompatible. - NO_PGLOGICAL_INSTALLED (7): - No pglogical extension installed on - databases, applicable for postgres. - PGLOGICAL_NODE_ALREADY_EXISTS (8): - pglogical node already exists on databases, - applicable for postgres. - INVALID_WAL_LEVEL (9): - The value of parameter wal_level is not set to logical. - INVALID_SHARED_PRELOAD_LIBRARY (10): - The value of parameter shared_preload_libraries does not - include pglogical. - INSUFFICIENT_MAX_REPLICATION_SLOTS (11): - The value of parameter max_replication_slots is not - sufficient. - INSUFFICIENT_MAX_WAL_SENDERS (12): - The value of parameter max_wal_senders is not sufficient. - INSUFFICIENT_MAX_WORKER_PROCESSES (13): - The value of parameter max_worker_processes is not - sufficient. - UNSUPPORTED_EXTENSIONS (14): - Extensions installed are either not supported - or having unsupported versions. - UNSUPPORTED_MIGRATION_TYPE (15): - Unsupported migration type. 
- INVALID_RDS_LOGICAL_REPLICATION (16): - Invalid RDS logical replication. - UNSUPPORTED_GTID_MODE (17): - The gtid_mode is not supported, applicable for MySQL. - UNSUPPORTED_TABLE_DEFINITION (18): - The table definition is not support due to - missing primary key or replica identity. - UNSUPPORTED_DEFINER (19): - The definer is not supported. - CANT_RESTART_RUNNING_MIGRATION (21): - Migration is already running at the time of - restart request. - TABLES_WITH_LIMITED_SUPPORT (24): - The source has tables with limited support. - E.g. PostgreSQL tables without primary keys. - UNSUPPORTED_DATABASE_LOCALE (25): - The source uses an unsupported locale. - UNSUPPORTED_DATABASE_FDW_CONFIG (26): - The source uses an unsupported Foreign Data - Wrapper configuration. - ERROR_RDBMS (27): - There was an underlying RDBMS error. - SOURCE_SIZE_EXCEEDS_THRESHOLD (28): - The source DB size in Bytes exceeds a certain - threshold. The migration might require an - increase of quota, or might not be supported. 
- """ - ERROR_CODE_UNSPECIFIED = 0 - CONNECTION_FAILURE = 1 - AUTHENTICATION_FAILURE = 2 - INVALID_CONNECTION_PROFILE_CONFIG = 3 - VERSION_INCOMPATIBILITY = 4 - CONNECTION_PROFILE_TYPES_INCOMPATIBILITY = 5 - NO_PGLOGICAL_INSTALLED = 7 - PGLOGICAL_NODE_ALREADY_EXISTS = 8 - INVALID_WAL_LEVEL = 9 - INVALID_SHARED_PRELOAD_LIBRARY = 10 - INSUFFICIENT_MAX_REPLICATION_SLOTS = 11 - INSUFFICIENT_MAX_WAL_SENDERS = 12 - INSUFFICIENT_MAX_WORKER_PROCESSES = 13 - UNSUPPORTED_EXTENSIONS = 14 - UNSUPPORTED_MIGRATION_TYPE = 15 - INVALID_RDS_LOGICAL_REPLICATION = 16 - UNSUPPORTED_GTID_MODE = 17 - UNSUPPORTED_TABLE_DEFINITION = 18 - UNSUPPORTED_DEFINER = 19 - CANT_RESTART_RUNNING_MIGRATION = 21 - TABLES_WITH_LIMITED_SUPPORT = 24 - UNSUPPORTED_DATABASE_LOCALE = 25 - UNSUPPORTED_DATABASE_FDW_CONFIG = 26 - ERROR_RDBMS = 27 - SOURCE_SIZE_EXCEEDS_THRESHOLD = 28 - - error_code: ErrorCode = proto.Field( - proto.ENUM, - number=1, - enum=ErrorCode, - ) - error_message: str = proto.Field( - proto.STRING, - number=2, - ) - error_detail_message: str = proto.Field( - proto.STRING, - number=3, - ) - - -class PrivateConnection(proto.Message): - r"""The PrivateConnection resource is used to establish private - connectivity with the customer's network. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The name of the resource. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The create time of the resource. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update time of the - resource. - labels (MutableMapping[str, str]): - The resource labels for private connections to use to - annotate any related underlying resources such as Compute - Engine VMs. An object containing a list of "key": "value" - pairs. - - Example: - ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. - display_name (str): - The private connection display name. 
- state (google.cloud.clouddms_v1.types.PrivateConnection.State): - Output only. The state of the private - connection. - error (google.rpc.status_pb2.Status): - Output only. The error details in case of - state FAILED. - vpc_peering_config (google.cloud.clouddms_v1.types.VpcPeeringConfig): - VPC peering configuration. - - This field is a member of `oneof`_ ``connectivity``. - """ - class State(proto.Enum): - r"""Private Connection state. - - Values: - STATE_UNSPECIFIED (0): - No description available. - CREATING (1): - The private connection is in creation state - - creating resources. - CREATED (2): - The private connection has been created with - all of its resources. - FAILED (3): - The private connection creation has failed. - DELETING (4): - The private connection is being deleted. - FAILED_TO_DELETE (5): - Delete request has failed, resource is in - invalid state. - DELETED (6): - The private connection has been deleted. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - CREATED = 2 - FAILED = 3 - DELETING = 4 - FAILED_TO_DELETE = 5 - DELETED = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=7, - message=status_pb2.Status, - ) - vpc_peering_config: 'VpcPeeringConfig' = proto.Field( - proto.MESSAGE, - number=100, - oneof='connectivity', - message='VpcPeeringConfig', - ) - - -class VpcPeeringConfig(proto.Message): - r"""The VPC peering configuration is used to create VPC peering - with the consumer's VPC. 
- - Attributes: - vpc_name (str): - Required. Fully qualified name of the VPC - that Database Migration Service will peer to. - subnet (str): - Required. A free subnet for peering. (CIDR of - /29) - """ - - vpc_name: str = proto.Field( - proto.STRING, - number=1, - ) - subnet: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5ae..0000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index ee175f1..0000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/clouddms_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py deleted file mode 100644 index e7615c7..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_create_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.CreateConnectionProfileRequest( - parent="parent_value", - connection_profile_id="connection_profile_id_value", - connection_profile=connection_profile, - ) - - # Make the request - operation = client.create_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py deleted file mode 100644 index bf8cd78..0000000 --- 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_create_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.CreateConnectionProfileRequest( - parent="parent_value", - connection_profile_id="connection_profile_id_value", - connection_profile=connection_profile, - ) - - # Make the request - operation = client.create_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py deleted file mode 100644 index cdeffd2..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_async.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_create_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.CreateMigrationJobRequest( - parent="parent_value", - migration_job_id="migration_job_id_value", - migration_job=migration_job, - ) - - # Make the request - operation = client.create_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py deleted file mode 100644 index a7a40ba..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_migration_job_sync.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_create_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.CreateMigrationJobRequest( - parent="parent_value", - migration_job_id="migration_job_id_value", - migration_job=migration_job, - ) - - # Make the request - operation = client.create_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py deleted file mode 100644 index e9c7703..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_delete_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConnectionProfileRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py deleted file mode 100644 index 4c05c38..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_delete_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteConnectionProfileRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py deleted file mode 100644 index 15e2bb1..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_delete_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteMigrationJobRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py deleted file mode 100644 index f00b4de..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_delete_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.DeleteMigrationJobRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py deleted file mode 100644 index 8195933..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GenerateSshScript -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_generate_ssh_script(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - vm_creation_config = clouddms_v1.VmCreationConfig() - vm_creation_config.vm_machine_type = "vm_machine_type_value" - - request = clouddms_v1.GenerateSshScriptRequest( - vm_creation_config=vm_creation_config, - vm="vm_value", - ) - - # Make the request - response = await client.generate_ssh_script(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py deleted file mode 100644 index 4ddf07a..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GenerateSshScript -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_generate_ssh_script(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - vm_creation_config = clouddms_v1.VmCreationConfig() - vm_creation_config.vm_machine_type = "vm_machine_type_value" - - request = clouddms_v1.GenerateSshScriptRequest( - vm_creation_config=vm_creation_config, - vm="vm_value", - ) - - # Make the request - response = client.generate_ssh_script(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py deleted file mode 100644 index 8204812..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_get_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConnectionProfileRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection_profile(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py deleted file mode 100644 index abc1e54..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_get_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetConnectionProfileRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection_profile(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py deleted file mode 100644 index e9513b0..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_get_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.GetMigrationJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_job(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py deleted file mode 100644 index f9c5c2f..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_migration_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_get_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.GetMigrationJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_job(request=request) - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py deleted file mode 100644 index 48cde5c..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnectionProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_list_connection_profiles(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConnectionProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connection_profiles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py deleted file mode 100644 index e2439ba..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnectionProfiles -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_list_connection_profiles(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListConnectionProfilesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connection_profiles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py deleted file mode 100644 index 35d5950..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_list_migration_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ListMigrationJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py deleted file mode 100644 index de9611f..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_list_migration_jobs(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ListMigrationJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py deleted file mode 100644 index a7e0d5b..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PromoteMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_promote_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.PromoteMigrationJobRequest( - ) - - # Make the request - operation = client.promote_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py deleted file mode 100644 index 1972401..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PromoteMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_promote_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.PromoteMigrationJobRequest( - ) - - # Make the request - operation = client.promote_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py deleted file mode 100644 index 22db451..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestartMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_restart_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.RestartMigrationJobRequest( - ) - - # Make the request - operation = client.restart_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py deleted file mode 100644 index 5f8ab15..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestartMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_restart_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.RestartMigrationJobRequest( - ) - - # Make the request - operation = client.restart_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py deleted file mode 100644 index c59e5dd..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResumeMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_resume_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.ResumeMigrationJobRequest( - ) - - # Make the request - operation = client.resume_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py deleted file mode 100644 index 75d40af..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResumeMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_resume_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.ResumeMigrationJobRequest( - ) - - # Make the request - operation = client.resume_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py deleted file mode 100644 index bfe21ee..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_start_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.StartMigrationJobRequest( - ) - - # Make the request - operation = client.start_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py deleted file mode 100644 index b73ede5..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_start_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_start_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.StartMigrationJobRequest( - ) - - # Make the request - operation = client.start_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py deleted file mode 100644 index b6352ee..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_stop_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.StopMigrationJobRequest( - ) - - # Make the request - operation = client.stop_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py deleted file mode 100644 index 9c6fdc4..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_stop_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.StopMigrationJobRequest( - ) - - # Make the request - operation = client.stop_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py deleted file mode 100644 index 4776efa..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_update_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.UpdateConnectionProfileRequest( - connection_profile=connection_profile, - ) - - # Make the request - operation = client.update_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py deleted file mode 100644 index 68cae51..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnectionProfile -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_update_connection_profile(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - connection_profile = clouddms_v1.ConnectionProfile() - connection_profile.mysql.host = "host_value" - connection_profile.mysql.port = 453 - connection_profile.mysql.username = "username_value" - connection_profile.mysql.password = "password_value" - - request = clouddms_v1.UpdateConnectionProfileRequest( - connection_profile=connection_profile, - ) - - # Make the request - operation = client.update_connection_profile(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py deleted file mode 100644 index 76363dc..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_update_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.UpdateMigrationJobRequest( - migration_job=migration_job, - ) - - # Make the request - operation = client.update_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py deleted file mode 100644 index 35eee8d..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_migration_job_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_update_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - migration_job = clouddms_v1.MigrationJob() - migration_job.reverse_ssh_connectivity.vm_ip = "vm_ip_value" - migration_job.reverse_ssh_connectivity.vm_port = 775 - migration_job.type_ = "CONTINUOUS" - migration_job.source = "source_value" - migration_job.destination = "destination_value" - - request = clouddms_v1.UpdateMigrationJobRequest( - migration_job=migration_job, - ) - - # Make the request - operation = client.update_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py deleted file mode 100644 index daee5a7..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for VerifyMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -async def sample_verify_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceAsyncClient() - - # Initialize request argument(s) - request = clouddms_v1.VerifyMigrationJobRequest( - ) - - # Make the request - operation = client.verify_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py deleted file mode 100644 index 901a5f7..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for VerifyMigrationJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dms - - -# [START datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import clouddms_v1 - - -def sample_verify_migration_job(): - # Create a client - client = clouddms_v1.DataMigrationServiceClient() - - # Initialize request argument(s) - request = clouddms_v1.VerifyMigrationJobRequest( - ) - - # Make the request - operation = client.verify_migration_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json deleted file mode 100644 index 10d2ba4..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json +++ /dev/null @@ -1,5771 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.clouddms.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-dms", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.apply_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ApplyConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "apply_conversion_workspace" - }, - "description": "Sample for ApplyConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.apply_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", - "service": { - 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ApplyConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "apply_conversion_workspace" - }, - "description": "Sample for ApplyConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.commit_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CommitConversionWorkspace" - }, - "parameters": [ - { - "name": "request", 
- "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "commit_conversion_workspace" - }, - "description": "Sample for CommitConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.commit_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CommitConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "commit_conversion_workspace" - }, - "description": "Sample for CommitConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.convert_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ConvertConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "convert_conversion_workspace" - }, 
- "description": "Sample for ConvertConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.convert_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ConvertConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "convert_conversion_workspace" - }, - "description": "Sample for ConvertConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "connection_profile_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connection_profile" - }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "connection_profile_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_connection_profile" - }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversion_workspace", - "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" - }, - { - "name": "conversion_workspace_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_conversion_workspace" - }, - "description": "Sample for CreateConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversion_workspace", - "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" - }, - { - "name": "conversion_workspace_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_conversion_workspace" - }, - "description": "Sample for CreateConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_migration_job" - }, - "description": "Sample for CreateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", - "segments": [ - { - "end": 64, - "start": 27, - "type": "FULL" - }, - { - "end": 64, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 61, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 65, - "start": 62, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreateMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_migration_job" - }, - "description": "Sample for CreateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", - "segments": [ - { - "end": 64, - "start": 27, - "type": "FULL" - }, - { - "end": 64, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 61, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 65, - "start": 62, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreatePrivateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "private_connection", - "type": "google.cloud.clouddms_v1.types.PrivateConnection" - }, - { - "name": "private_connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_private_connection" - }, - "description": "Sample for CreatePrivateConnection", - "file": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "CreatePrivateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "private_connection", - "type": "google.cloud.clouddms_v1.types.PrivateConnection" - }, - { - "name": "private_connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_private_connection" - }, - "description": "Sample for CreatePrivateConnection", - "file": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connection_profile" - }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, 
- { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connection_profile" - }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" 
- }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_conversion_workspace" - }, - "description": "Sample for DeleteConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" 
- } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_conversion_workspace" - }, - "description": "Sample for DeleteConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, 
- "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_migration_job" - }, - "description": "Sample for DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeleteMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_migration_job" - }, - "description": "Sample for DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", - "service": { - 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeletePrivateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_private_connection" - }, - "description": "Sample for DeletePrivateConnection", - "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DeletePrivateConnection" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_private_connection" - }, - "description": "Sample for DeletePrivateConnection", - "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_conversion_workspace_revisions", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DescribeConversionWorkspaceRevisions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", - "shortName": "describe_conversion_workspace_revisions" - }, - "description": "Sample for DescribeConversionWorkspaceRevisions", - "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_conversion_workspace_revisions", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DescribeConversionWorkspaceRevisions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", - "shortName": "describe_conversion_workspace_revisions" - }, - "description": "Sample for DescribeConversionWorkspaceRevisions", - "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_database_entities", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DescribeDatabaseEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager", - "shortName": "describe_database_entities" - }, - "description": "Sample for DescribeDatabaseEntities", - "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_database_entities", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "DescribeDatabaseEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager", - 
"shortName": "describe_database_entities" - }, - "description": "Sample for DescribeDatabaseEntities", - "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.fetch_static_ips", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "FetchStaticIps" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager", - "shortName": "fetch_static_ips" - }, - "description": "Sample for FetchStaticIps", - "file": 
"datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.fetch_static_ips", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "FetchStaticIps" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager", - "shortName": "fetch_static_ips" - }, - "description": "Sample for FetchStaticIps", - "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync", - 
"segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GenerateSshScript" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" - }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GenerateSshScript" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" - }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" - }, - { - "canonical": 
true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" - }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" - }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_conversion_workspace", - "method": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", - "shortName": "get_conversion_workspace" - }, - "description": "Sample for GetConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": 
"DataMigrationService" - }, - "shortName": "GetConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", - "shortName": "get_conversion_workspace" - }, - "description": "Sample for GetConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" - }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" - }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetPrivateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", - "shortName": "get_private_connection" - }, - "description": "Sample for GetPrivateConnection", - 
"file": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_private_connection", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "GetPrivateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", - "shortName": "get_private_connection" - }, - "description": "Sample for GetPrivateConnection", - "file": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.import_mapping_rules", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ImportMappingRules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_mapping_rules" - }, - "description": "Sample for ImportMappingRules", - "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - 
"type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.import_mapping_rules", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ImportMappingRules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_mapping_rules" - }, - "description": "Sample for ImportMappingRules", - "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListConnectionProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", - "shortName": "list_connection_profiles" - }, - "description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" - }, - { - "canonical": 
true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListConnectionProfiles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", - "shortName": "list_connection_profiles" - }, - "description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": 
"DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_conversion_workspaces", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListConversionWorkspaces" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager", - "shortName": "list_conversion_workspaces" - }, - "description": "Sample for ListConversionWorkspaces", - "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.list_conversion_workspaces", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListConversionWorkspaces" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager", - "shortName": "list_conversion_workspaces" - }, - "description": "Sample for ListConversionWorkspaces", - "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", - "method": { - 
"fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListMigrationJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", - "shortName": "list_migration_jobs" - }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": 
"DataMigrationService" - }, - "shortName": "ListMigrationJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", - "shortName": "list_migration_jobs" - }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_private_connections", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListPrivateConnections" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager", - "shortName": "list_private_connections" - }, - "description": "Sample for ListPrivateConnections", - "file": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_private_connections", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ListPrivateConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - 
"type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager", - "shortName": "list_private_connections" - }, - "description": "Sample for ListPrivateConnections", - "file": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "PromoteMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "promote_migration_job" - }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "PromoteMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "promote_migration_job" - }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "RestartMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restart_migration_job" - }, - "description": "Sample for RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "RestartMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restart_migration_job" - }, - "description": "Sample for RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ResumeMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resume_migration_job" - }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "ResumeMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "resume_migration_job" - }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.rollback_conversion_workspace", - "method": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "RollbackConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "rollback_conversion_workspace" - }, - "description": "Sample for RollbackConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.rollback_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": 
"DataMigrationService" - }, - "shortName": "RollbackConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "rollback_conversion_workspace" - }, - "description": "Sample for RollbackConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.search_background_jobs", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "SearchBackgroundJobs" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", - "shortName": "search_background_jobs" - }, - "description": "Sample for SearchBackgroundJobs", - "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.search_background_jobs", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "SearchBackgroundJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", - "shortName": "search_background_jobs" - }, - "description": "Sample for SearchBackgroundJobs", - "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.seed_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "SeedConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "seed_conversion_workspace" - }, - "description": "Sample for 
SeedConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.seed_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "SeedConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "seed_conversion_workspace" - }, - "description": "Sample for SeedConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.start_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "StartMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "start_migration_job" - }, - "description": "Sample for StartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - 
"type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_start_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.start_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.StartMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "StartMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.StartMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "start_migration_job" - }, - "description": "Sample for StartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_start_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_StartMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datamigration_v1_generated_data_migration_service_start_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.stop_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "StopMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "stop_migration_job" - }, - "description": "Sample for StopMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": 
"DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.stop_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.StopMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "StopMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.StopMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "stop_migration_job" - }, - "description": "Sample for StopMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_StopMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_stop_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", - "service": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "UpdateConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_connection_profile" - }, - "description": "Sample for UpdateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_connection_profile", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConnectionProfile", - "service": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "UpdateConnectionProfile" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateConnectionProfileRequest" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_connection_profile" - }, - "description": "Sample for UpdateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConnectionProfile_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", - "service": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "UpdateConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" - }, - { - "name": "conversion_workspace", - "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_conversion_workspace" - }, - "description": "Sample for UpdateConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_conversion_workspace", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", - "service": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "UpdateConversionWorkspace" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" - }, - { - "name": "conversion_workspace", - "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_conversion_workspace" - }, - "description": "Sample for UpdateConversionWorkspace", - "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", - "service": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "UpdateMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_migration_job" - }, - "description": "Sample for UpdateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - 
}, - "shortName": "UpdateMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.UpdateMigrationJobRequest" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_migration_job" - }, - "description": "Sample for UpdateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateMigrationJob_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_update_migration_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", - "shortName": "DataMigrationServiceAsyncClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.verify_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "VerifyMigrationJob" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "verify_migration_job" - }, - "description": "Sample for VerifyMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", - "shortName": "DataMigrationServiceClient" - }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.verify_migration_job", - "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.VerifyMigrationJob", - "service": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService", - "shortName": "DataMigrationService" - }, - "shortName": "VerifyMigrationJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.clouddms_v1.types.VerifyMigrationJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "verify_migration_job" - }, - "description": "Sample for VerifyMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_VerifyMigrationJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datamigration_v1_generated_data_migration_service_verify_migration_job_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py deleted file mode 100644 index ad6ad15..0000000 --- a/owl-bot-staging/v1/scripts/fixup_clouddms_v1_keywords.py +++ /dev/null @@ -1,211 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class clouddmsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_conversion_workspace': ('name', 'filter', 'connection_profile', ), - 'commit_conversion_workspace': ('name', 'commit_name', ), - 'convert_conversion_workspace': ('name', 'auto_commit', 'filter', ), - 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), - 'create_conversion_workspace': ('parent', 'conversion_workspace_id', 'conversion_workspace', 'request_id', ), - 'create_migration_job': ('parent', 'migration_job_id', 'migration_job', 'request_id', ), - 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', 'skip_validation', ), - 'delete_connection_profile': ('name', 'request_id', 'force', ), - 'delete_conversion_workspace': ('name', 'request_id', ), - 'delete_migration_job': ('name', 'request_id', 'force', ), - 'delete_private_connection': ('name', 'request_id', ), - 'describe_conversion_workspace_revisions': ('conversion_workspace', 'commit_id', ), - 'describe_database_entities': ('conversion_workspace', 'page_size', 'page_token', 'tree', 'uncommitted', 'commit_id', 'filter', ), - 'fetch_static_ips': ('name', 'page_size', 'page_token', ), - 'generate_ssh_script': ('vm', 'migration_job', 'vm_creation_config', 'vm_selection_config', 'vm_port', ), - 'get_connection_profile': ('name', ), - 'get_conversion_workspace': 
('name', ), - 'get_migration_job': ('name', ), - 'get_private_connection': ('name', ), - 'import_mapping_rules': ('parent', 'rules_format', 'rules_files', 'auto_commit', ), - 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_conversion_workspaces': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_migration_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'promote_migration_job': ('name', ), - 'restart_migration_job': ('name', ), - 'resume_migration_job': ('name', ), - 'rollback_conversion_workspace': ('name', ), - 'search_background_jobs': ('conversion_workspace', 'return_most_recent_per_job_type', 'max_size', 'completed_until_time', ), - 'seed_conversion_workspace': ('name', 'auto_commit', 'source_connection_profile', 'destination_connection_profile', ), - 'start_migration_job': ('name', ), - 'stop_migration_job': ('name', ), - 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), - 'update_conversion_workspace': ('update_mask', 'conversion_workspace', 'request_id', ), - 'update_migration_job': ('update_mask', 'migration_job', 'request_id', ), - 'verify_migration_job': ('name', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=clouddmsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the clouddms client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 2501d9d..0000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dms' - - -description = "Google Cloud Dms API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/clouddms/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-dms" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - 
classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf9..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py deleted file mode 100644 index d1d3635..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ /dev/null @@ -1,10874 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceAsyncClient -from google.cloud.clouddms_v1.services.data_migration_service import DataMigrationServiceClient -from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.services.data_migration_service import transports -from google.cloud.clouddms_v1.types import clouddms -from google.cloud.clouddms_v1.types import clouddms_resources -from google.cloud.clouddms_v1.types import conversionworkspace_resources -from google.cloud.location import locations_pb2 -from 
google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataMigrationServiceClient._get_default_mtls_endpoint(None) is None - assert DataMigrationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataMigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataMigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataMigrationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - 
-@pytest.mark.parametrize("client_class,transport_name", [ - (DataMigrationServiceClient, "grpc"), - (DataMigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_migration_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datamigration.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataMigrationServiceGrpcTransport, "grpc"), - (transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_migration_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataMigrationServiceClient, "grpc"), - (DataMigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_data_migration_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - 
client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datamigration.googleapis.com:443' - ) - - -def test_data_migration_service_client_get_transport_class(): - transport = DataMigrationServiceClient.get_transport_class() - available_transports = [ - transports.DataMigrationServiceGrpcTransport, - ] - assert transport in available_transports - - transport = DataMigrationServiceClient.get_transport_class("grpc") - assert transport == transports.DataMigrationServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) -@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) -def test_data_migration_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. 
- with mock.patch.object(DataMigrationServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - 
credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", "true"), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", "false"), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) -@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_migration_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataMigrationServiceClient, DataMigrationServiceAsyncClient -]) -@mock.patch.object(DataMigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceClient)) -@mock.patch.object(DataMigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataMigrationServiceAsyncClient)) -def test_data_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc"), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_migration_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_migration_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_migration_service_client_client_options_from_dict(): - with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataMigrationServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport, "grpc", grpc_helpers), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_migration_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datamigration.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datamigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.ListMigrationJobsRequest, - dict, -]) -def test_list_migration_jobs(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty 
request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListMigrationJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_migration_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListMigrationJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMigrationJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_migration_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - client.list_migration_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListMigrationJobsRequest() - -@pytest.mark.asyncio -async def test_list_migration_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListMigrationJobsRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_migration_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListMigrationJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMigrationJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_migration_jobs_async_from_dict(): - await test_list_migration_jobs_async(request_type=dict) - - -def test_list_migration_jobs_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListMigrationJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - call.return_value = clouddms.ListMigrationJobsResponse() - client.list_migration_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_migration_jobs_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListMigrationJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) - await client.list_migration_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_migration_jobs_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListMigrationJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_migration_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_migration_jobs_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_migration_jobs( - clouddms.ListMigrationJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_migration_jobs_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListMigrationJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListMigrationJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_migration_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_migration_jobs_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_migration_jobs( - clouddms.ListMigrationJobsRequest(), - parent='parent_value', - ) - - -def test_list_migration_jobs_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - next_page_token='abc', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[], - next_page_token='def', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - ], - next_page_token='ghi', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_migration_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, clouddms_resources.MigrationJob) - for i in results) -def test_list_migration_jobs_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - next_page_token='abc', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[], - next_page_token='def', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - ], - next_page_token='ghi', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_migration_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_migration_jobs_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - next_page_token='abc', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[], - next_page_token='def', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - ], - next_page_token='ghi', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_migration_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, clouddms_resources.MigrationJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_migration_jobs_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - next_page_token='abc', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[], - next_page_token='def', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - ], - next_page_token='ghi', - ), - clouddms.ListMigrationJobsResponse( - migration_jobs=[ - clouddms_resources.MigrationJob(), - clouddms_resources.MigrationJob(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_migration_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clouddms.GetMigrationJobRequest, - dict, -]) -def test_get_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = clouddms_resources.MigrationJob( - name='name_value', - display_name='display_name_value', - state=clouddms_resources.MigrationJob.State.MAINTENANCE, - phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, - type_=clouddms_resources.MigrationJob.Type.ONE_TIME, - dump_path='dump_path_value', - source='source_value', - destination='destination_value', - filter='filter_value', - cmek_key_name='cmek_key_name_value', - reverse_ssh_connectivity=clouddms_resources.ReverseSshConnectivity(vm_ip='vm_ip_value'), - ) - response = client.get_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms_resources.MigrationJob) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE - assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP - assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME - assert response.dump_path == 'dump_path_value' - assert response.source == 'source_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.cmek_key_name == 'cmek_key_name_value' - - -def test_get_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - client.get_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetMigrationJobRequest() - -@pytest.mark.asyncio -async def test_get_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob( - name='name_value', - display_name='display_name_value', - state=clouddms_resources.MigrationJob.State.MAINTENANCE, - phase=clouddms_resources.MigrationJob.Phase.FULL_DUMP, - type_=clouddms_resources.MigrationJob.Type.ONE_TIME, - dump_path='dump_path_value', - source='source_value', - destination='destination_value', - filter='filter_value', - cmek_key_name='cmek_key_name_value', - )) - response = await client.get_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetMigrationJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms_resources.MigrationJob) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == clouddms_resources.MigrationJob.State.MAINTENANCE - assert response.phase == clouddms_resources.MigrationJob.Phase.FULL_DUMP - assert response.type_ == clouddms_resources.MigrationJob.Type.ONE_TIME - assert response.dump_path == 'dump_path_value' - assert response.source == 'source_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.cmek_key_name == 'cmek_key_name_value' - - -@pytest.mark.asyncio -async def test_get_migration_job_async_from_dict(): - await test_get_migration_job_async(request_type=dict) - - -def test_get_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - call.return_value = clouddms_resources.MigrationJob() - client.get_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.GetMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) - await client.get_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_migration_job_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.MigrationJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_migration_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_migration_job_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_migration_job( - clouddms.GetMigrationJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_migration_job_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.MigrationJob() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.MigrationJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_migration_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_migration_job_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_migration_job( - clouddms.GetMigrationJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.CreateMigrationJobRequest, - dict, -]) -def test_create_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - client.create_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateMigrationJobRequest() - -@pytest.mark.asyncio -async def test_create_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_migration_job_async_from_dict(): - await test_create_migration_job_async(request_type=dict) - - -def test_create_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CreateMigrationJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.CreateMigrationJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_migration_job_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_migration_job( - parent='parent_value', - migration_job=clouddms_resources.MigrationJob(name='name_value'), - migration_job_id='migration_job_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].migration_job - mock_val = clouddms_resources.MigrationJob(name='name_value') - assert arg == mock_val - arg = args[0].migration_job_id - mock_val = 'migration_job_id_value' - assert arg == mock_val - - -def test_create_migration_job_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_migration_job( - clouddms.CreateMigrationJobRequest(), - parent='parent_value', - migration_job=clouddms_resources.MigrationJob(name='name_value'), - migration_job_id='migration_job_id_value', - ) - -@pytest.mark.asyncio -async def test_create_migration_job_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_migration_job( - parent='parent_value', - migration_job=clouddms_resources.MigrationJob(name='name_value'), - migration_job_id='migration_job_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].migration_job - mock_val = clouddms_resources.MigrationJob(name='name_value') - assert arg == mock_val - arg = args[0].migration_job_id - mock_val = 'migration_job_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_migration_job_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_migration_job( - clouddms.CreateMigrationJobRequest(), - parent='parent_value', - migration_job=clouddms_resources.MigrationJob(name='name_value'), - migration_job_id='migration_job_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.UpdateMigrationJobRequest, - dict, -]) -def test_update_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateMigrationJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_update_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - client.update_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateMigrationJobRequest() - -@pytest.mark.asyncio -async def test_update_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateMigrationJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_migration_job_async_from_dict(): - await test_update_migration_job_async(request_type=dict) - - -def test_update_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.UpdateMigrationJobRequest() - - request.migration_job.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'migration_job.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.UpdateMigrationJobRequest() - - request.migration_job.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_migration_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'migration_job.name=name_value', - ) in kw['metadata'] - - -def test_update_migration_job_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_migration_job( - migration_job=clouddms_resources.MigrationJob(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].migration_job - mock_val = clouddms_resources.MigrationJob(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_migration_job_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_migration_job( - clouddms.UpdateMigrationJobRequest(), - migration_job=clouddms_resources.MigrationJob(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_migration_job_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_migration_job( - migration_job=clouddms_resources.MigrationJob(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].migration_job - mock_val = clouddms_resources.MigrationJob(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_migration_job_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_migration_job( - clouddms.UpdateMigrationJobRequest(), - migration_job=clouddms_resources.MigrationJob(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.DeleteMigrationJobRequest, - dict, -]) -def test_delete_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - client.delete_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteMigrationJobRequest() - -@pytest.mark.asyncio -async def test_delete_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_migration_job_async_from_dict(): - await test_delete_migration_job_async(request_type=dict) - - -def test_delete_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_migration_job_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_migration_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_migration_job_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_migration_job( - clouddms.DeleteMigrationJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_migration_job_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_migration_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_migration_job_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_migration_job( - clouddms.DeleteMigrationJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.StartMigrationJobRequest, - dict, -]) -def test_start_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.start_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StartMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_start_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_job), - '__call__') as call: - client.start_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StartMigrationJobRequest() - -@pytest.mark.asyncio -async def test_start_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StartMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.start_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StartMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_start_migration_job_async_from_dict(): - await test_start_migration_job_async(request_type=dict) - - -def test_start_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.StartMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.start_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_start_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.StartMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.start_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.StopMigrationJobRequest, - dict, -]) -def test_stop_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.stop_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StopMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_stop_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.stop_migration_job), - '__call__') as call: - client.stop_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StopMigrationJobRequest() - -@pytest.mark.asyncio -async def test_stop_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.StopMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.stop_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.StopMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_stop_migration_job_async_from_dict(): - await test_stop_migration_job_async(request_type=dict) - - -def test_stop_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.StopMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.stop_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.stop_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_stop_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.StopMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.stop_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.ResumeMigrationJobRequest, - dict, -]) -def test_resume_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.resume_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ResumeMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_resume_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_migration_job), - '__call__') as call: - client.resume_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ResumeMigrationJobRequest() - -@pytest.mark.asyncio -async def test_resume_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.ResumeMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.resume_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ResumeMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_resume_migration_job_async_from_dict(): - await test_resume_migration_job_async(request_type=dict) - - -def test_resume_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ResumeMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.resume_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resume_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.ResumeMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.resume_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.PromoteMigrationJobRequest, - dict, -]) -def test_promote_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.promote_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.promote_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.PromoteMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_promote_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.promote_migration_job), - '__call__') as call: - client.promote_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.PromoteMigrationJobRequest() - -@pytest.mark.asyncio -async def test_promote_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.PromoteMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.promote_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.promote_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.PromoteMigrationJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_promote_migration_job_async_from_dict(): - await test_promote_migration_job_async(request_type=dict) - - -def test_promote_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.PromoteMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.promote_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.promote_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_promote_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.PromoteMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.promote_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.promote_migration_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.VerifyMigrationJobRequest, - dict, -]) -def test_verify_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.verify_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.VerifyMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_verify_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.verify_migration_job), - '__call__') as call: - client.verify_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.VerifyMigrationJobRequest() - -@pytest.mark.asyncio -async def test_verify_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.VerifyMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.verify_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.VerifyMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_verify_migration_job_async_from_dict(): - await test_verify_migration_job_async(request_type=dict) - - -def test_verify_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.VerifyMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.verify_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.verify_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_verify_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.VerifyMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.verify_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.verify_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.RestartMigrationJobRequest, - dict, -]) -def test_restart_migration_job(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restart_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restart_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RestartMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restart_migration_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restart_migration_job), - '__call__') as call: - client.restart_migration_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RestartMigrationJobRequest() - -@pytest.mark.asyncio -async def test_restart_migration_job_async(transport: str = 'grpc_asyncio', request_type=clouddms.RestartMigrationJobRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restart_migration_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restart_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RestartMigrationJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restart_migration_job_async_from_dict(): - await test_restart_migration_job_async(request_type=dict) - - -def test_restart_migration_job_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.RestartMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restart_migration_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restart_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restart_migration_job_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.RestartMigrationJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restart_migration_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restart_migration_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.GenerateSshScriptRequest, - dict, -]) -def test_generate_ssh_script(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_ssh_script), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.SshScript( - script='script_value', - ) - response = client.generate_ssh_script(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GenerateSshScriptRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms.SshScript) - assert response.script == 'script_value' - - -def test_generate_ssh_script_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_ssh_script), - '__call__') as call: - client.generate_ssh_script() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GenerateSshScriptRequest() - -@pytest.mark.asyncio -async def test_generate_ssh_script_async(transport: str = 'grpc_asyncio', request_type=clouddms.GenerateSshScriptRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_ssh_script), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript( - script='script_value', - )) - response = await client.generate_ssh_script(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GenerateSshScriptRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms.SshScript) - assert response.script == 'script_value' - - -@pytest.mark.asyncio -async def test_generate_ssh_script_async_from_dict(): - await test_generate_ssh_script_async(request_type=dict) - - -def test_generate_ssh_script_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GenerateSshScriptRequest() - - request.migration_job = 'migration_job_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_ssh_script), - '__call__') as call: - call.return_value = clouddms.SshScript() - client.generate_ssh_script(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'migration_job=migration_job_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_generate_ssh_script_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GenerateSshScriptRequest() - - request.migration_job = 'migration_job_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_ssh_script), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SshScript()) - await client.generate_ssh_script(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'migration_job=migration_job_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.ListConnectionProfilesRequest, - dict, -]) -def test_list_connection_profiles(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConnectionProfilesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_connection_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConnectionProfilesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionProfilesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_connection_profiles_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - client.list_connection_profiles() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConnectionProfilesRequest() - -@pytest.mark.asyncio -async def test_list_connection_profiles_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConnectionProfilesRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_connection_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConnectionProfilesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListConnectionProfilesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_connection_profiles_async_from_dict(): - await test_list_connection_profiles_async(request_type=dict) - - -def test_list_connection_profiles_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListConnectionProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - call.return_value = clouddms.ListConnectionProfilesResponse() - client.list_connection_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_connection_profiles_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListConnectionProfilesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) - await client.list_connection_profiles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_connection_profiles_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConnectionProfilesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_connection_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_connection_profiles_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_connection_profiles( - clouddms.ListConnectionProfilesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_connection_profiles_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConnectionProfilesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConnectionProfilesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_connection_profiles( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_connection_profiles_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_connection_profiles( - clouddms.ListConnectionProfilesRequest(), - parent='parent_value', - ) - - -def test_list_connection_profiles_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - next_page_token='abc', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[], - next_page_token='def', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - ], - next_page_token='ghi', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_connection_profiles(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, clouddms_resources.ConnectionProfile) - for i in results) -def test_list_connection_profiles_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - next_page_token='abc', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[], - next_page_token='def', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - ], - next_page_token='ghi', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - ), - RuntimeError, - ) - pages = list(client.list_connection_profiles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_connection_profiles_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - next_page_token='abc', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[], - next_page_token='def', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - ], - next_page_token='ghi', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_connection_profiles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, clouddms_resources.ConnectionProfile) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_connection_profiles_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connection_profiles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - next_page_token='abc', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[], - next_page_token='def', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - ], - next_page_token='ghi', - ), - clouddms.ListConnectionProfilesResponse( - connection_profiles=[ - clouddms_resources.ConnectionProfile(), - clouddms_resources.ConnectionProfile(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_connection_profiles(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clouddms.GetConnectionProfileRequest, - dict, -]) -def test_get_connection_profile(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = clouddms_resources.ConnectionProfile( - name='name_value', - state=clouddms_resources.ConnectionProfile.State.DRAFT, - display_name='display_name_value', - provider=clouddms_resources.DatabaseProvider.CLOUDSQL, - mysql=clouddms_resources.MySqlConnectionProfile(host='host_value'), - ) - response = client.get_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms_resources.ConnectionProfile) - assert response.name == 'name_value' - assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT - assert response.display_name == 'display_name_value' - assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL - - -def test_get_connection_profile_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - client.get_connection_profile() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConnectionProfileRequest() - -@pytest.mark.asyncio -async def test_get_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConnectionProfileRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile( - name='name_value', - state=clouddms_resources.ConnectionProfile.State.DRAFT, - display_name='display_name_value', - provider=clouddms_resources.DatabaseProvider.CLOUDSQL, - )) - response = await client.get_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms_resources.ConnectionProfile) - assert response.name == 'name_value' - assert response.state == clouddms_resources.ConnectionProfile.State.DRAFT - assert response.display_name == 'display_name_value' - assert response.provider == clouddms_resources.DatabaseProvider.CLOUDSQL - - -@pytest.mark.asyncio -async def test_get_connection_profile_async_from_dict(): - await test_get_connection_profile_async(request_type=dict) - - -def test_get_connection_profile_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetConnectionProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - call.return_value = clouddms_resources.ConnectionProfile() - client.get_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_connection_profile_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetConnectionProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile()) - await client.get_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_connection_profile_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.ConnectionProfile() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_connection_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_connection_profile_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_connection_profile( - clouddms.GetConnectionProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_connection_profile_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.ConnectionProfile() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.ConnectionProfile()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_connection_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_connection_profile_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_connection_profile( - clouddms.GetConnectionProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.CreateConnectionProfileRequest, - dict, -]) -def test_create_connection_profile(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_connection_profile_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - client.create_connection_profile() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConnectionProfileRequest() - -@pytest.mark.asyncio -async def test_create_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConnectionProfileRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_connection_profile_async_from_dict(): - await test_create_connection_profile_async(request_type=dict) - - -def test_create_connection_profile_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.CreateConnectionProfileRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_connection_profile_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CreateConnectionProfileRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_connection_profile_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_connection_profile( - parent='parent_value', - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - connection_profile_id='connection_profile_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection_profile - mock_val = clouddms_resources.ConnectionProfile(name='name_value') - assert arg == mock_val - arg = args[0].connection_profile_id - mock_val = 'connection_profile_id_value' - assert arg == mock_val - - -def test_create_connection_profile_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_connection_profile( - clouddms.CreateConnectionProfileRequest(), - parent='parent_value', - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - connection_profile_id='connection_profile_id_value', - ) - -@pytest.mark.asyncio -async def test_create_connection_profile_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_connection_profile( - parent='parent_value', - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - connection_profile_id='connection_profile_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection_profile - mock_val = clouddms_resources.ConnectionProfile(name='name_value') - assert arg == mock_val - arg = args[0].connection_profile_id - mock_val = 'connection_profile_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_connection_profile_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_connection_profile( - clouddms.CreateConnectionProfileRequest(), - parent='parent_value', - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - connection_profile_id='connection_profile_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.UpdateConnectionProfileRequest, - dict, -]) -def test_update_connection_profile(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_connection_profile_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - client.update_connection_profile() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConnectionProfileRequest() - -@pytest.mark.asyncio -async def test_update_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConnectionProfileRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_connection_profile_async_from_dict(): - await test_update_connection_profile_async(request_type=dict) - - -def test_update_connection_profile_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.UpdateConnectionProfileRequest() - - request.connection_profile.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection_profile.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_connection_profile_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.UpdateConnectionProfileRequest() - - request.connection_profile.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection_profile.name=name_value', - ) in kw['metadata'] - - -def test_update_connection_profile_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_connection_profile( - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].connection_profile - mock_val = clouddms_resources.ConnectionProfile(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_connection_profile_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection_profile( - clouddms.UpdateConnectionProfileRequest(), - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_connection_profile_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_connection_profile( - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].connection_profile - mock_val = clouddms_resources.ConnectionProfile(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_connection_profile_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_connection_profile( - clouddms.UpdateConnectionProfileRequest(), - connection_profile=clouddms_resources.ConnectionProfile(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.DeleteConnectionProfileRequest, - dict, -]) -def test_delete_connection_profile(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_connection_profile_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - client.delete_connection_profile() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConnectionProfileRequest() - -@pytest.mark.asyncio -async def test_delete_connection_profile_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConnectionProfileRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConnectionProfileRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_connection_profile_async_from_dict(): - await test_delete_connection_profile_async(request_type=dict) - - -def test_delete_connection_profile_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteConnectionProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_connection_profile_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteConnectionProfileRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_connection_profile(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_connection_profile_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_connection_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_connection_profile_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_connection_profile( - clouddms.DeleteConnectionProfileRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_connection_profile_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection_profile), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_connection_profile( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_connection_profile_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_connection_profile( - clouddms.DeleteConnectionProfileRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.CreatePrivateConnectionRequest, - dict, -]) -def test_create_private_connection(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreatePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_private_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - client.create_private_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreatePrivateConnectionRequest() - -@pytest.mark.asyncio -async def test_create_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreatePrivateConnectionRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreatePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_private_connection_async_from_dict(): - await test_create_private_connection_async(request_type=dict) - - -def test_create_private_connection_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = clouddms.CreatePrivateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_private_connection_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CreatePrivateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_private_connection_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_private_connection( - parent='parent_value', - private_connection=clouddms_resources.PrivateConnection(name='name_value'), - private_connection_id='private_connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].private_connection - mock_val = clouddms_resources.PrivateConnection(name='name_value') - assert arg == mock_val - arg = args[0].private_connection_id - mock_val = 'private_connection_id_value' - assert arg == mock_val - - -def test_create_private_connection_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_private_connection( - clouddms.CreatePrivateConnectionRequest(), - parent='parent_value', - private_connection=clouddms_resources.PrivateConnection(name='name_value'), - private_connection_id='private_connection_id_value', - ) - -@pytest.mark.asyncio -async def test_create_private_connection_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_private_connection( - parent='parent_value', - private_connection=clouddms_resources.PrivateConnection(name='name_value'), - private_connection_id='private_connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].private_connection - mock_val = clouddms_resources.PrivateConnection(name='name_value') - assert arg == mock_val - arg = args[0].private_connection_id - mock_val = 'private_connection_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_private_connection_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_private_connection( - clouddms.CreatePrivateConnectionRequest(), - parent='parent_value', - private_connection=clouddms_resources.PrivateConnection(name='name_value'), - private_connection_id='private_connection_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.GetPrivateConnectionRequest, - dict, -]) -def test_get_private_connection(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.PrivateConnection( - name='name_value', - display_name='display_name_value', - state=clouddms_resources.PrivateConnection.State.CREATING, - vpc_peering_config=clouddms_resources.VpcPeeringConfig(vpc_name='vpc_name_value'), - ) - response = client.get_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetPrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms_resources.PrivateConnection) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == clouddms_resources.PrivateConnection.State.CREATING - - -def test_get_private_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - client.get_private_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetPrivateConnectionRequest() - -@pytest.mark.asyncio -async def test_get_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetPrivateConnectionRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection( - name='name_value', - display_name='display_name_value', - state=clouddms_resources.PrivateConnection.State.CREATING, - )) - response = await client.get_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetPrivateConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms_resources.PrivateConnection) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == clouddms_resources.PrivateConnection.State.CREATING - - -@pytest.mark.asyncio -async def test_get_private_connection_async_from_dict(): - await test_get_private_connection_async(request_type=dict) - - -def test_get_private_connection_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetPrivateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - call.return_value = clouddms_resources.PrivateConnection() - client.get_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_private_connection_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetPrivateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) - await client.get_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_private_connection_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.PrivateConnection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_private_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_private_connection_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_private_connection( - clouddms.GetPrivateConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_private_connection_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms_resources.PrivateConnection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms_resources.PrivateConnection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_private_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_private_connection_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_private_connection( - clouddms.GetPrivateConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.ListPrivateConnectionsRequest, - dict, -]) -def test_list_private_connections(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListPrivateConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_private_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListPrivateConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_private_connections_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - client.list_private_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListPrivateConnectionsRequest() - -@pytest.mark.asyncio -async def test_list_private_connections_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListPrivateConnectionsRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_private_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListPrivateConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_private_connections_async_from_dict(): - await test_list_private_connections_async(request_type=dict) - - -def test_list_private_connections_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListPrivateConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - call.return_value = clouddms.ListPrivateConnectionsResponse() - client.list_private_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_private_connections_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListPrivateConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) - await client.list_private_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_private_connections_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListPrivateConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_private_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_private_connections_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_private_connections( - clouddms.ListPrivateConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_private_connections_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListPrivateConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListPrivateConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_private_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_private_connections_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_private_connections( - clouddms.ListPrivateConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_private_connections_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - next_page_token='abc', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token='def', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - ], - next_page_token='ghi', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_private_connections(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, clouddms_resources.PrivateConnection) - for i in results) -def test_list_private_connections_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - next_page_token='abc', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token='def', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - ], - next_page_token='ghi', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_private_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_private_connections_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - next_page_token='abc', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token='def', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - ], - next_page_token='ghi', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_private_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, clouddms_resources.PrivateConnection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_private_connections_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - next_page_token='abc', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token='def', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - ], - next_page_token='ghi', - ), - clouddms.ListPrivateConnectionsResponse( - private_connections=[ - clouddms_resources.PrivateConnection(), - clouddms_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_private_connections(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clouddms.DeletePrivateConnectionRequest, - dict, -]) -def test_delete_private_connection(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_private_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeletePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_private_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - client.delete_private_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeletePrivateConnectionRequest() - -@pytest.mark.asyncio -async def test_delete_private_connection_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeletePrivateConnectionRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeletePrivateConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_private_connection_async_from_dict(): - await test_delete_private_connection_async(request_type=dict) - - -def test_delete_private_connection_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeletePrivateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_private_connection_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeletePrivateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_private_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_private_connection_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_private_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_private_connection_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_private_connection( - clouddms.DeletePrivateConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_private_connection_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_private_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_private_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_private_connection_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_private_connection( - clouddms.DeletePrivateConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.GetConversionWorkspaceRequest, - dict, -]) -def test_get_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = conversionworkspace_resources.ConversionWorkspace( - name='name_value', - has_uncommitted_changes=True, - latest_commit_id='latest_commit_id_value', - display_name='display_name_value', - ) - response = client.get_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) - assert response.name == 'name_value' - assert response.has_uncommitted_changes is True - assert response.latest_commit_id == 'latest_commit_id_value' - assert response.display_name == 'display_name_value' - - -def test_get_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - client.get_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_get_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.GetConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace( - name='name_value', - has_uncommitted_changes=True, - latest_commit_id='latest_commit_id_value', - display_name='display_name_value', - )) - response = await client.get_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.GetConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) - assert response.name == 'name_value' - assert response.has_uncommitted_changes is True - assert response.latest_commit_id == 'latest_commit_id_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_get_conversion_workspace_async_from_dict(): - await test_get_conversion_workspace_async(request_type=dict) - - -def test_get_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - call.return_value = conversionworkspace_resources.ConversionWorkspace() - client.get_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.GetConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) - await client.get_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_conversion_workspace_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = conversionworkspace_resources.ConversionWorkspace() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_conversion_workspace( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_conversion_workspace_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_conversion_workspace( - clouddms.GetConversionWorkspaceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_conversion_workspace_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = conversionworkspace_resources.ConversionWorkspace() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(conversionworkspace_resources.ConversionWorkspace()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_conversion_workspace( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_conversion_workspace_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_conversion_workspace( - clouddms.GetConversionWorkspaceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.ListConversionWorkspacesRequest, - dict, -]) -def test_list_conversion_workspaces(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConversionWorkspacesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_conversion_workspaces(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConversionWorkspacesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConversionWorkspacesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_conversion_workspaces_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - client.list_conversion_workspaces() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConversionWorkspacesRequest() - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_async(transport: str = 'grpc_asyncio', request_type=clouddms.ListConversionWorkspacesRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_conversion_workspaces(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ListConversionWorkspacesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListConversionWorkspacesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_async_from_dict(): - await test_list_conversion_workspaces_async(request_type=dict) - - -def test_list_conversion_workspaces_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListConversionWorkspacesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - call.return_value = clouddms.ListConversionWorkspacesResponse() - client.list_conversion_workspaces(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ListConversionWorkspacesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) - await client.list_conversion_workspaces(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_conversion_workspaces_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConversionWorkspacesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_conversion_workspaces( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_conversion_workspaces_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_conversion_workspaces( - clouddms.ListConversionWorkspacesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.ListConversionWorkspacesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.ListConversionWorkspacesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_conversion_workspaces( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_conversion_workspaces( - clouddms.ListConversionWorkspacesRequest(), - parent='parent_value', - ) - - -def test_list_conversion_workspaces_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='abc', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[], - next_page_token='def', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='ghi', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_conversion_workspaces(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) - for i in results) -def test_list_conversion_workspaces_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='abc', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[], - next_page_token='def', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='ghi', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - ), - RuntimeError, - ) - pages = list(client.list_conversion_workspaces(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='abc', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[], - next_page_token='def', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='ghi', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_conversion_workspaces(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, conversionworkspace_resources.ConversionWorkspace) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_conversion_workspaces_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_conversion_workspaces), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='abc', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[], - next_page_token='def', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - ], - next_page_token='ghi', - ), - clouddms.ListConversionWorkspacesResponse( - conversion_workspaces=[ - conversionworkspace_resources.ConversionWorkspace(), - conversionworkspace_resources.ConversionWorkspace(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_conversion_workspaces(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clouddms.CreateConversionWorkspaceRequest, - dict, -]) -def test_create_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - client.create_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_create_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CreateConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CreateConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_conversion_workspace_async_from_dict(): - await test_create_conversion_workspace_async(request_type=dict) - - -def test_create_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CreateConversionWorkspaceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CreateConversionWorkspaceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_conversion_workspace_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_conversion_workspace( - parent='parent_value', - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - conversion_workspace_id='conversion_workspace_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].conversion_workspace - mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') - assert arg == mock_val - arg = args[0].conversion_workspace_id - mock_val = 'conversion_workspace_id_value' - assert arg == mock_val - - -def test_create_conversion_workspace_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_conversion_workspace( - clouddms.CreateConversionWorkspaceRequest(), - parent='parent_value', - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - conversion_workspace_id='conversion_workspace_id_value', - ) - -@pytest.mark.asyncio -async def test_create_conversion_workspace_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_conversion_workspace( - parent='parent_value', - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - conversion_workspace_id='conversion_workspace_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].conversion_workspace - mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') - assert arg == mock_val - arg = args[0].conversion_workspace_id - mock_val = 'conversion_workspace_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_conversion_workspace_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_conversion_workspace( - clouddms.CreateConversionWorkspaceRequest(), - parent='parent_value', - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - conversion_workspace_id='conversion_workspace_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.UpdateConversionWorkspaceRequest, - dict, -]) -def test_update_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - client.update_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_update_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.UpdateConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.UpdateConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_conversion_workspace_async_from_dict(): - await test_update_conversion_workspace_async(request_type=dict) - - -def test_update_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.UpdateConversionWorkspaceRequest() - - request.conversion_workspace.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.UpdateConversionWorkspaceRequest() - - request.conversion_workspace.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace.name=name_value', - ) in kw['metadata'] - - -def test_update_conversion_workspace_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_conversion_workspace( - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].conversion_workspace - mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_conversion_workspace_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_conversion_workspace( - clouddms.UpdateConversionWorkspaceRequest(), - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_conversion_workspace_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_conversion_workspace( - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].conversion_workspace - mock_val = conversionworkspace_resources.ConversionWorkspace(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_conversion_workspace_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_conversion_workspace( - clouddms.UpdateConversionWorkspaceRequest(), - conversion_workspace=conversionworkspace_resources.ConversionWorkspace(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.DeleteConversionWorkspaceRequest, - dict, -]) -def test_delete_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_delete_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - client.delete_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_delete_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.DeleteConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DeleteConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_conversion_workspace_async_from_dict(): - await test_delete_conversion_workspace_async(request_type=dict) - - -def test_delete_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DeleteConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_conversion_workspace_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_conversion_workspace( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_conversion_workspace_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_conversion_workspace( - clouddms.DeleteConversionWorkspaceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_conversion_workspace_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_conversion_workspace( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_conversion_workspace_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_conversion_workspace( - clouddms.DeleteConversionWorkspaceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clouddms.SeedConversionWorkspaceRequest, - dict, -]) -def test_seed_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seed_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.seed_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SeedConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_seed_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seed_conversion_workspace), - '__call__') as call: - client.seed_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SeedConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_seed_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.SeedConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seed_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.seed_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SeedConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_seed_conversion_workspace_async_from_dict(): - await test_seed_conversion_workspace_async(request_type=dict) - - -def test_seed_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.SeedConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seed_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.seed_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_seed_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.SeedConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seed_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.seed_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.ImportMappingRulesRequest, - dict, -]) -def test_import_mapping_rules(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_mapping_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_mapping_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ImportMappingRulesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_mapping_rules_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_mapping_rules), - '__call__') as call: - client.import_mapping_rules() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ImportMappingRulesRequest() - -@pytest.mark.asyncio -async def test_import_mapping_rules_async(transport: str = 'grpc_asyncio', request_type=clouddms.ImportMappingRulesRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_mapping_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_mapping_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ImportMappingRulesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_mapping_rules_async_from_dict(): - await test_import_mapping_rules_async(request_type=dict) - - -def test_import_mapping_rules_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ImportMappingRulesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_mapping_rules), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_mapping_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_mapping_rules_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ImportMappingRulesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_mapping_rules), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_mapping_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.ConvertConversionWorkspaceRequest, - dict, -]) -def test_convert_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.convert_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.convert_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ConvertConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_convert_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.convert_conversion_workspace), - '__call__') as call: - client.convert_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ConvertConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_convert_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ConvertConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.convert_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.convert_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ConvertConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_convert_conversion_workspace_async_from_dict(): - await test_convert_conversion_workspace_async(request_type=dict) - - -def test_convert_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.ConvertConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.convert_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.convert_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_convert_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ConvertConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.convert_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.convert_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.CommitConversionWorkspaceRequest, - dict, -]) -def test_commit_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.commit_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CommitConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_commit_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.commit_conversion_workspace), - '__call__') as call: - client.commit_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CommitConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_commit_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.CommitConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.commit_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.CommitConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_commit_conversion_workspace_async_from_dict(): - await test_commit_conversion_workspace_async(request_type=dict) - - -def test_commit_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.CommitConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.commit_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_commit_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.CommitConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.commit_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.RollbackConversionWorkspaceRequest, - dict, -]) -def test_rollback_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.rollback_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RollbackConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_rollback_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rollback_conversion_workspace), - '__call__') as call: - client.rollback_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RollbackConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_rollback_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.RollbackConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.rollback_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.RollbackConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_rollback_conversion_workspace_async_from_dict(): - await test_rollback_conversion_workspace_async(request_type=dict) - - -def test_rollback_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.RollbackConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.rollback_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rollback_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.RollbackConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.rollback_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.ApplyConversionWorkspaceRequest, - dict, -]) -def test_apply_conversion_workspace(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.apply_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ApplyConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_apply_conversion_workspace_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.apply_conversion_workspace), - '__call__') as call: - client.apply_conversion_workspace() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ApplyConversionWorkspaceRequest() - -@pytest.mark.asyncio -async def test_apply_conversion_workspace_async(transport: str = 'grpc_asyncio', request_type=clouddms.ApplyConversionWorkspaceRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_conversion_workspace), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.apply_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.ApplyConversionWorkspaceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_apply_conversion_workspace_async_from_dict(): - await test_apply_conversion_workspace_async(request_type=dict) - - -def test_apply_conversion_workspace_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.ApplyConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_conversion_workspace), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.apply_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_apply_conversion_workspace_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.ApplyConversionWorkspaceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_conversion_workspace), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.apply_conversion_workspace(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.DescribeDatabaseEntitiesRequest, - dict, -]) -def test_describe_database_entities(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.DescribeDatabaseEntitiesResponse( - next_page_token='next_page_token_value', - ) - response = client.describe_database_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.DescribeDatabaseEntitiesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_describe_database_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - client.describe_database_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() - -@pytest.mark.asyncio -async def test_describe_database_entities_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeDatabaseEntitiesRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse( - next_page_token='next_page_token_value', - )) - response = await client.describe_database_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.DescribeDatabaseEntitiesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_describe_database_entities_async_from_dict(): - await test_describe_database_entities_async(request_type=dict) - - -def test_describe_database_entities_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = clouddms.DescribeDatabaseEntitiesRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - call.return_value = clouddms.DescribeDatabaseEntitiesResponse() - client.describe_database_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_describe_database_entities_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DescribeDatabaseEntitiesRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeDatabaseEntitiesResponse()) - await client.describe_database_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -def test_describe_database_entities_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='abc', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[], - next_page_token='def', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='ghi', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('conversion_workspace', ''), - )), - ) - pager = client.describe_database_entities(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) - for i in results) -def test_describe_database_entities_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='abc', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[], - next_page_token='def', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='ghi', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - ), - RuntimeError, - ) - pages = list(client.describe_database_entities(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_describe_database_entities_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='abc', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[], - next_page_token='def', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='ghi', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - ), - RuntimeError, - ) - async_pager = await client.describe_database_entities(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, conversionworkspace_resources.DatabaseEntity) - for i in responses) - - -@pytest.mark.asyncio -async def test_describe_database_entities_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_database_entities), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='abc', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[], - next_page_token='def', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - ], - next_page_token='ghi', - ), - clouddms.DescribeDatabaseEntitiesResponse( - database_entities=[ - conversionworkspace_resources.DatabaseEntity(), - conversionworkspace_resources.DatabaseEntity(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.describe_database_entities(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clouddms.SearchBackgroundJobsRequest, - dict, -]) -def test_search_background_jobs(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_background_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.SearchBackgroundJobsResponse( - ) - response = client.search_background_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SearchBackgroundJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms.SearchBackgroundJobsResponse) - - -def test_search_background_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_background_jobs), - '__call__') as call: - client.search_background_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SearchBackgroundJobsRequest() - -@pytest.mark.asyncio -async def test_search_background_jobs_async(transport: str = 'grpc_asyncio', request_type=clouddms.SearchBackgroundJobsRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_background_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse( - )) - response = await client.search_background_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.SearchBackgroundJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms.SearchBackgroundJobsResponse) - - -@pytest.mark.asyncio -async def test_search_background_jobs_async_from_dict(): - await test_search_background_jobs_async(request_type=dict) - - -def test_search_background_jobs_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.SearchBackgroundJobsRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_background_jobs), - '__call__') as call: - call.return_value = clouddms.SearchBackgroundJobsResponse() - client.search_background_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_search_background_jobs_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.SearchBackgroundJobsRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.search_background_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.SearchBackgroundJobsResponse()) - await client.search_background_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.DescribeConversionWorkspaceRevisionsRequest, - dict, -]) -def test_describe_conversion_workspace_revisions(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_conversion_workspace_revisions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse( - ) - response = client.describe_conversion_workspace_revisions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) - - -def test_describe_conversion_workspace_revisions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_conversion_workspace_revisions), - '__call__') as call: - client.describe_conversion_workspace_revisions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() - -@pytest.mark.asyncio -async def test_describe_conversion_workspace_revisions_async(transport: str = 'grpc_asyncio', request_type=clouddms.DescribeConversionWorkspaceRevisionsRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_conversion_workspace_revisions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse( - )) - response = await client.describe_conversion_workspace_revisions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) - - -@pytest.mark.asyncio -async def test_describe_conversion_workspace_revisions_async_from_dict(): - await test_describe_conversion_workspace_revisions_async(request_type=dict) - - -def test_describe_conversion_workspace_revisions_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DescribeConversionWorkspaceRevisionsRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.describe_conversion_workspace_revisions), - '__call__') as call: - call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() - client.describe_conversion_workspace_revisions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_describe_conversion_workspace_revisions_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.DescribeConversionWorkspaceRevisionsRequest() - - request.conversion_workspace = 'conversion_workspace_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.describe_conversion_workspace_revisions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.DescribeConversionWorkspaceRevisionsResponse()) - await client.describe_conversion_workspace_revisions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'conversion_workspace=conversion_workspace_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - clouddms.FetchStaticIpsRequest, - dict, -]) -def test_fetch_static_ips(request_type, transport: str = 'grpc'): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.FetchStaticIpsResponse( - static_ips=['static_ips_value'], - next_page_token='next_page_token_value', - ) - response = client.fetch_static_ips(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.FetchStaticIpsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchStaticIpsPager) - assert response.static_ips == ['static_ips_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_fetch_static_ips_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - client.fetch_static_ips() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.FetchStaticIpsRequest() - -@pytest.mark.asyncio -async def test_fetch_static_ips_async(transport: str = 'grpc_asyncio', request_type=clouddms.FetchStaticIpsRequest): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse( - static_ips=['static_ips_value'], - next_page_token='next_page_token_value', - )) - response = await client.fetch_static_ips(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clouddms.FetchStaticIpsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchStaticIpsAsyncPager) - assert response.static_ips == ['static_ips_value'] - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_fetch_static_ips_async_from_dict(): - await test_fetch_static_ips_async(request_type=dict) - - -def test_fetch_static_ips_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.FetchStaticIpsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - call.return_value = clouddms.FetchStaticIpsResponse() - client.fetch_static_ips(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_static_ips_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clouddms.FetchStaticIpsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) - await client.fetch_static_ips(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_fetch_static_ips_flattened(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clouddms.FetchStaticIpsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_static_ips( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_fetch_static_ips_flattened_error(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_static_ips( - clouddms.FetchStaticIpsRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_fetch_static_ips_flattened_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = clouddms.FetchStaticIpsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clouddms.FetchStaticIpsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_static_ips( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_static_ips_flattened_error_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_static_ips( - clouddms.FetchStaticIpsRequest(), - name='name_value', - ) - - -def test_fetch_static_ips_pager(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[], - next_page_token='def', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - ], - next_page_token='ghi', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('name', ''), - )), - ) - pager = client.fetch_static_ips(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) -def test_fetch_static_ips_pages(transport_name: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[], - next_page_token='def', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - ], - next_page_token='ghi', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.fetch_static_ips(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_fetch_static_ips_async_pager(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[], - next_page_token='def', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - ], - next_page_token='ghi', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.fetch_static_ips(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) - - -@pytest.mark.asyncio -async def test_fetch_static_ips_async_pages(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_static_ips), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[], - next_page_token='def', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - ], - next_page_token='ghi', - ), - clouddms.FetchStaticIpsResponse( - static_ips=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.fetch_static_ips(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataMigrationServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataMigrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = DataMigrationServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataMigrationServiceGrpcTransport, - ) - -def test_data_migration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_migration_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_migration_jobs', - 'get_migration_job', - 'create_migration_job', - 'update_migration_job', - 'delete_migration_job', - 'start_migration_job', - 'stop_migration_job', - 'resume_migration_job', - 'promote_migration_job', - 'verify_migration_job', - 'restart_migration_job', - 'generate_ssh_script', - 'list_connection_profiles', - 'get_connection_profile', - 'create_connection_profile', - 'update_connection_profile', - 'delete_connection_profile', - 'create_private_connection', - 'get_private_connection', - 'list_private_connections', - 'delete_private_connection', - 'get_conversion_workspace', - 'list_conversion_workspaces', - 'create_conversion_workspace', - 'update_conversion_workspace', - 'delete_conversion_workspace', - 'seed_conversion_workspace', - 'import_mapping_rules', - 'convert_conversion_workspace', - 'commit_conversion_workspace', - 'rollback_conversion_workspace', - 'apply_conversion_workspace', - 'describe_database_entities', - 'search_background_jobs', - 'describe_conversion_workspace_revisions', - 'fetch_static_ips', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_migration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', 
autospec=True) as load_creds, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_migration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport() - adc.assert_called_once() - - -def test_data_migration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataMigrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataMigrationServiceGrpcTransport, grpc_helpers), - (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datamigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datamigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) -def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_migration_service_host_no_port(transport_name): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datamigration.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_migration_service_host_with_port(transport_name): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datamigration.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datamigration.googleapis.com:8000' - ) - -def test_data_migration_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataMigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_data_migration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DataMigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) -def test_data_migration_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are 
-# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataMigrationServiceGrpcTransport, transports.DataMigrationServiceGrpcAsyncIOTransport]) -def test_data_migration_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_migration_service_grpc_lro_client(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -def test_data_migration_service_grpc_lro_async_client(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_connection_profile_path(): - project = "squid" - location = "clam" - connection_profile = "whelk" - expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(project=project, location=location, connection_profile=connection_profile, ) - actual = DataMigrationServiceClient.connection_profile_path(project, location, connection_profile) - assert expected == actual - - -def test_parse_connection_profile_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection_profile": "nudibranch", - } - path = DataMigrationServiceClient.connection_profile_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_connection_profile_path(path) - assert expected == actual - -def test_conversion_workspace_path(): - project = "cuttlefish" - location = "mussel" - conversion_workspace = "winkle" - expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(project=project, location=location, conversion_workspace=conversion_workspace, ) - actual = DataMigrationServiceClient.conversion_workspace_path(project, location, conversion_workspace) - assert expected == actual - - -def test_parse_conversion_workspace_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "conversion_workspace": "abalone", - } - path = DataMigrationServiceClient.conversion_workspace_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_conversion_workspace_path(path) - assert expected == actual - -def test_migration_job_path(): - project = "squid" - location = "clam" - migration_job = "whelk" - expected = "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format(project=project, location=location, migration_job=migration_job, ) - actual = DataMigrationServiceClient.migration_job_path(project, location, migration_job) - assert expected == actual - - -def test_parse_migration_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "migration_job": "nudibranch", - } - path = DataMigrationServiceClient.migration_job_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_migration_job_path(path) - assert expected == actual - -def test_networks_path(): - project = "cuttlefish" - network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - actual = DataMigrationServiceClient.networks_path(project, network) - assert expected == actual - - -def test_parse_networks_path(): - expected = { - "project": "winkle", - "network": "nautilus", - } - path = DataMigrationServiceClient.networks_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_networks_path(path) - assert expected == actual - -def test_private_connection_path(): - project = "scallop" - location = "abalone" - private_connection = "squid" - expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(project=project, location=location, private_connection=private_connection, ) - actual = DataMigrationServiceClient.private_connection_path(project, location, private_connection) - assert expected == actual - - -def test_parse_private_connection_path(): - expected = { - "project": "clam", - "location": "whelk", - "private_connection": "octopus", - } - path = DataMigrationServiceClient.private_connection_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_private_connection_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataMigrationServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DataMigrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataMigrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DataMigrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataMigrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DataMigrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DataMigrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DataMigrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataMigrationServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DataMigrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataMigrationServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataMigrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_delete_operation(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far 
as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = DataMigrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataMigrationServiceClient, transports.DataMigrationServiceGrpcTransport), - (DataMigrationServiceAsyncClient, transports.DataMigrationServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py similarity index 100% rename from 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py diff --git 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py similarity index 100% rename from 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py diff --git 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py similarity index 100% rename from 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py diff --git 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py rename to 
samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py similarity index 100% rename from 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py diff --git 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py similarity index 100% rename from 
owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py diff --git a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py diff --git 
a/owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py similarity index 100% rename from owl-bot-staging/v1/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py rename to samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py diff --git a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json index 94aa0fb..10d2ba4 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dms", - "version": "1.6.2" + "version": "0.1.0" }, "snippets": [ { @@ -19,31 +19,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.apply_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateConnectionProfile" + "shortName": "ApplyConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" 
- }, - { - "name": "connection_profile_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" }, { "name": "retry", @@ -59,21 +47,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connection_profile" + "shortName": "apply_conversion_workspace" }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async", "segments": [ { - "end": 63, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 56, "start": 27, "type": "SHORT" }, @@ -83,22 +71,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py" }, { "canonical": true, @@ -107,31 +95,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.apply_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateConnectionProfile" + "shortName": "ApplyConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "connection_profile_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" }, { "name": "retry", @@ -147,21 +123,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_connection_profile" + "shortName": "apply_conversion_workspace" }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync", "segments": [ { - "end": 63, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 56, "start": 27, "type": "SHORT" }, @@ -171,22 +147,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + "title": 
"datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py" }, { "canonical": true, @@ -196,31 +172,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.commit_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateMigrationJob" + "shortName": "CommitConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" }, { "name": "retry", @@ -236,21 +200,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_migration_job" + "shortName": "commit_conversion_workspace" }, - "description": "Sample for CreateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async", "segments": [ { - "end": 64, + "end": 55, "start": 27, 
"type": "FULL" }, { - "end": 64, + "end": 55, "start": 27, "type": "SHORT" }, @@ -260,22 +224,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py" }, { "canonical": true, @@ -284,31 +248,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.commit_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateMigrationJob" + "shortName": "CommitConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" }, { "name": "retry", @@ -324,21 +276,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_migration_job" + "shortName": "commit_conversion_workspace" }, - "description": "Sample for CreateMigrationJob", - "file": 
"datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync", "segments": [ { - "end": 64, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 55, "start": 27, "type": "SHORT" }, @@ -348,22 +300,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py" }, { "canonical": true, @@ -373,23 +325,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.convert_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteConnectionProfile" + "shortName": "ConvertConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - 
"name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" }, { "name": "retry", @@ -405,21 +353,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connection_profile" + "shortName": "convert_conversion_workspace" }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -429,22 +377,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py" }, { "canonical": true, @@ -453,23 +401,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.convert_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteConnectionProfile" + "shortName": "ConvertConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" }, { "name": "retry", @@ -485,21 +429,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connection_profile" + "shortName": "convert_conversion_workspace" }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -509,22 +453,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py" }, { "canonical": true, @@ -534,22 +478,30 @@ "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteMigrationJob" + "shortName": "CreateConnectionProfile" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", "type": "str" }, { @@ -566,21 +518,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_migration_job" + "shortName": "create_connection_profile" }, - "description": "Sample for DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", "segments": [ { - "end": 55, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 63, "start": 27, "type": "SHORT" }, @@ -590,22 +542,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + 
"end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" }, { "canonical": true, @@ -614,22 +566,30 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteMigrationJob" + "shortName": "CreateConnectionProfile" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", "type": "str" }, { @@ -646,13 +606,2605 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_migration_job" + "shortName": "create_connection_profile" }, - "description": "Sample for DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "description": "Sample for CreateConnectionProfile", + "file": 
"datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", 
+ "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": 
"datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": 
"datamigration_v1_generated_data_migration_service_create_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": 
"datamigration_v1_generated_data_migration_service_create_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], 
+ "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_private_connection", + 
"method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + 
"shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": 
"datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "segments": [ + { + "end": 55, + 
"start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": 
"get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync", "segments": [ { "end": 55, @@ -660,7 +3212,249 @@ "type": "FULL" }, { - "end": 55, + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, "start": 27, "type": "SHORT" }, @@ -674,18 +3468,98 @@ "start": 41, "type": "REQUEST_INITIALIZATION" }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, { "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py" }, { "canonical": true, @@ -695,19 +3569,23 @@ "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GenerateSshScript" + "shortName": "ListMigrationJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -722,22 +3600,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", + "shortName": "list_migration_jobs" }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -747,22 +3625,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 
52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" }, { "canonical": true, @@ -771,19 +3649,23 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GenerateSshScript" + "shortName": "ListMigrationJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -798,22 +3680,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", + "shortName": "list_migration_jobs" }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -823,22 +3705,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" }, { "canonical": true, @@ -848,22 +3730,22 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_private_connections", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetConnectionProfile" + "shortName": "ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -879,22 +3761,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" + "resultType": 
"google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -914,12 +3796,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py" }, { "canonical": true, @@ -928,22 +3810,22 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_private_connections", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetConnectionProfile" + "shortName": "ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + "type": 
"google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -959,22 +3841,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -994,12 +3876,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py" }, { "canonical": true, @@ -1009,23 +3891,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", "service": { "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetMigrationJob" + "shortName": "PromoteMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" }, { "name": "retry", @@ -1040,22 +3918,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_migration_job" }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1065,22 +3943,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" }, { "canonical": true, @@ -1089,23 +3967,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetMigrationJob" + "shortName": "PromoteMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" }, { "name": "retry", @@ -1120,22 +3994,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_migration_job" }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1145,22 +4019,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": 
"datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" }, { "canonical": true, @@ -1170,23 +4044,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListConnectionProfiles" + "shortName": "RestartMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" }, { "name": "retry", @@ -1201,22 +4071,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", - "shortName": "list_connection_profiles" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_migration_job" }, - "description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1226,22 +4096,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" }, { "canonical": true, @@ -1250,23 +4120,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListConnectionProfiles" + "shortName": "RestartMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" }, { "name": "retry", @@ -1281,22 +4147,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", - "shortName": "list_connection_profiles" + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_migration_job" }, - 
"description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1306,22 +4172,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" }, { "canonical": true, @@ -1331,23 +4197,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListMigrationJobs" + "shortName": "ResumeMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { 
- "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" }, { "name": "retry", @@ -1362,22 +4224,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", - "shortName": "list_migration_jobs" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_migration_job" }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1387,22 +4249,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" }, { "canonical": true, @@ -1411,23 +4273,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", "method": { - "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListMigrationJobs" + "shortName": "ResumeMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" }, { "name": "retry", @@ -1442,22 +4300,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", - "shortName": "list_migration_jobs" + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_migration_job" }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1467,22 +4325,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" + "title": 
"datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" }, { "canonical": true, @@ -1492,19 +4350,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.rollback_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "PromoteMigrationJob" + "shortName": "RollbackConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" }, { "name": "retry", @@ -1520,21 +4378,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "promote_migration_job" + "shortName": "rollback_conversion_workspace" }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1544,22 +4402,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py" }, { "canonical": true, @@ -1568,19 +4426,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.rollback_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "PromoteMigrationJob" + "shortName": "RollbackConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" }, { "name": "retry", @@ -1596,21 +4454,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "promote_migration_job" + "shortName": "rollback_conversion_workspace" }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1620,22 +4478,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py" }, { "canonical": true, @@ -1645,19 +4503,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.search_background_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "RestartMigrationJob" + "shortName": "SearchBackgroundJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" }, { "name": "retry", @@ -1672,22 +4530,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restart_migration_job" + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" }, - "description": "Sample for 
RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1697,22 +4555,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py" }, { "canonical": true, @@ -1721,19 +4579,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.search_background_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "RestartMigrationJob" + "shortName": "SearchBackgroundJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + "type": 
"google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" }, { "name": "retry", @@ -1748,22 +4606,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restart_migration_job" + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" }, - "description": "Sample for RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1773,22 +4631,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py" }, { "canonical": true, @@ -1798,19 +4656,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.seed_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ResumeMigrationJob" + "shortName": "SeedConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" }, { "name": "retry", @@ -1826,21 +4684,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resume_migration_job" + "shortName": "seed_conversion_workspace" }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1850,22 +4708,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py" }, { "canonical": true, @@ -1874,19 +4732,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.seed_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ResumeMigrationJob" + "shortName": "SeedConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" }, { "name": "retry", @@ -1902,21 +4760,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "resume_migration_job" + "shortName": "seed_conversion_workspace" }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1926,22 +4784,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" + "title": 
"datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py" }, { "canonical": true, @@ -2418,6 +5276,175 @@ ], "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_clouddms_v1_keywords.py b/scripts/fixup_clouddms_v1_keywords.py index 10c3dee..ad6ad15 100644 --- a/scripts/fixup_clouddms_v1_keywords.py +++ b/scripts/fixup_clouddms_v1_keywords.py @@ -39,21 +39,40 @@ def partition( class clouddmsCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', ), + 'apply_conversion_workspace': ('name', 'filter', 'connection_profile', ), + 'commit_conversion_workspace': ('name', 'commit_name', ), + 'convert_conversion_workspace': ('name', 'auto_commit', 'filter', ), + 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'create_conversion_workspace': ('parent', 'conversion_workspace_id', 'conversion_workspace', 'request_id', ), 'create_migration_job': ('parent', 'migration_job_id', 'migration_job', 'request_id', ), + 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', 'skip_validation', ), 'delete_connection_profile': ('name', 'request_id', 'force', ), + 'delete_conversion_workspace': ('name', 'request_id', ), 'delete_migration_job': ('name', 'request_id', 'force', ), + 'delete_private_connection': ('name', 'request_id', ), + 'describe_conversion_workspace_revisions': ('conversion_workspace', 'commit_id', ), + 'describe_database_entities': ('conversion_workspace', 'page_size', 'page_token', 'tree', 'uncommitted', 'commit_id', 'filter', ), + 'fetch_static_ips': ('name', 'page_size', 'page_token', ), 
'generate_ssh_script': ('vm', 'migration_job', 'vm_creation_config', 'vm_selection_config', 'vm_port', ), 'get_connection_profile': ('name', ), + 'get_conversion_workspace': ('name', ), 'get_migration_job': ('name', ), + 'get_private_connection': ('name', ), + 'import_mapping_rules': ('parent', 'rules_format', 'rules_files', 'auto_commit', ), 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_conversion_workspaces': ('parent', 'page_size', 'page_token', 'filter', ), 'list_migration_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'promote_migration_job': ('name', ), 'restart_migration_job': ('name', ), 'resume_migration_job': ('name', ), + 'rollback_conversion_workspace': ('name', ), + 'search_background_jobs': ('conversion_workspace', 'return_most_recent_per_job_type', 'max_size', 'completed_until_time', ), + 'seed_conversion_workspace': ('name', 'auto_commit', 'source_connection_profile', 'destination_connection_profile', ), 'start_migration_job': ('name', ), 'stop_migration_job': ('name', ), - 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', ), + 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'update_conversion_workspace': ('update_mask', 'conversion_workspace', 'request_id', ), 'update_migration_job': ('update_mask', 'migration_job', 'request_id', ), 'verify_migration_job': ('name', ), } diff --git a/setup.py b/setup.py index 8ce5121..dc4db60 100644 --- a/setup.py +++ b/setup.py @@ -40,6 +40,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/python-dms" diff --git 
a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 6c44adf..2beecf9 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -7,3 +7,4 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/tests/unit/gapic/clouddms_v1/test_data_migration_service.py index 8815fd3..8d4ed14 100644 --- a/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ b/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -39,6 +39,10 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 
import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore @@ -60,7 +64,11 @@ pagers, transports, ) -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) def client_cert_source_callback(): @@ -1197,6 +1205,8 @@ def test_get_migration_job(request_type, transport: str = "grpc"): dump_path="dump_path_value", source="source_value", destination="destination_value", + filter="filter_value", + cmek_key_name="cmek_key_name_value", reverse_ssh_connectivity=clouddms_resources.ReverseSshConnectivity( vm_ip="vm_ip_value" ), @@ -1218,6 +1228,8 @@ def test_get_migration_job(request_type, transport: str = "grpc"): assert response.dump_path == "dump_path_value" assert response.source == "source_value" assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.cmek_key_name == "cmek_key_name_value" def test_get_migration_job_empty_call(): @@ -1266,6 +1278,8 @@ async def test_get_migration_job_async( dump_path="dump_path_value", source="source_value", destination="destination_value", + filter="filter_value", + cmek_key_name="cmek_key_name_value", ) ) response = await client.get_migration_job(request) @@ -1285,6 +1299,8 @@ async def test_get_migration_job_async( assert response.dump_path == "dump_path_value" assert response.source == "source_value" assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.cmek_key_name == "cmek_key_name_value" @pytest.mark.asyncio @@ -4735,742 +4751,6926 @@ async def test_delete_connection_profile_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide 
credentials and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + clouddms.CreatePrivateConnectionRequest, + dict, + ], +) +def test_create_private_connection(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # It is an error to provide an api_key and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_private_connection(request) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() - # It is an error to provide scopes and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataMigrationServiceGrpcTransport( +def test_create_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - client = DataMigrationServiceClient(transport=transport) - assert client.transport is transport + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + client.create_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.DataMigrationServiceGrpcAsyncIOTransport( +@pytest.mark.asyncio +async def test_create_private_connection_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CreatePrivateConnectionRequest, +): + client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_private_connection(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = DataMigrationServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +@pytest.mark.asyncio +async def test_create_private_connection_async_from_dict(): + await test_create_private_connection_async(request_type=dict) + + +def test_create_private_connection_field_headers(): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) - assert isinstance( - client.transport, - transports.DataMigrationServiceGrpcTransport, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() -def test_data_migration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_private_connection(request) -def test_data_migration_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_migration_jobs", - "get_migration_job", - "create_migration_job", - "update_migration_job", - "delete_migration_job", - "start_migration_job", - "stop_migration_job", - "resume_migration_job", - "promote_migration_job", - "verify_migration_job", - "restart_migration_job", - "generate_ssh_script", - "list_connection_profiles", - "get_connection_profile", - "create_connection_profile", - "update_connection_profile", - "delete_connection_profile", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - with pytest.raises(NotImplementedError): - transport.close() - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client +@pytest.mark.asyncio +async def test_create_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CreatePrivateConnectionRequest() + request.parent = "parent_value" -def test_data_migration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.create_private_connection(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_data_migration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport() - adc.assert_called_once() - - -def test_data_migration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataMigrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) +def test_create_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_private_connection( + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataMigrationServiceGrpcTransport, grpc_helpers), - (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) +def test_create_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - create_channel.assert_called_with( - "datamigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="datamigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +async def test_create_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_private_connection( + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - ], -) -def test_data_migration_service_host_no_port(transport_name): - client = DataMigrationServiceClient( +@pytest.mark.asyncio +async def test_create_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datamigration.googleapis.com" - ), - transport=transport_name, ) - assert client.transport._host == ("datamigration.googleapis.com:443") + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", - "grpc_asyncio", + clouddms.GetPrivateConnectionRequest, + dict, ], ) -def test_data_migration_service_host_with_port(transport_name): +def test_get_private_connection(request_type, transport: str = "grpc"): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datamigration.googleapis.com:8000" - ), - transport=transport_name, + transport=transport, ) - assert client.transport._host == ("datamigration.googleapis.com:8000") + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_data_migration_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=clouddms_resources.PrivateConnection.State.CREATING, + vpc_peering_config=clouddms_resources.VpcPeeringConfig( + vpc_name="vpc_name_value" + ), + ) + response = client.get_private_connection(request) - # Check that channel is used if provided. 
- transport = transports.DataMigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == clouddms_resources.PrivateConnection.State.CREATING -def test_data_migration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that channel is used if provided. - transport = transports.DataMigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +def test_get_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + client.get_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() +@pytest.mark.asyncio +async def test_get_private_connection_async( + transport: str = "grpc_asyncio", request_type=clouddms.GetPrivateConnectionRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + 
# Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=clouddms_resources.PrivateConnection.State.CREATING, ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + ) + response = await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +@pytest.mark.asyncio +async def test_get_private_connection_async_from_dict(): + await test_get_private_connection_async(request_type=dict) + + +def test_get_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = clouddms_resources.PrivateConnection() + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection() + ) + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms_resources.PrivateConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name="name_value", + ) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, + clouddms.ListPrivateConnectionsRequest, + dict, ], ) -def test_data_migration_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() +def test_list_private_connections(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + client.list_private_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_private_connections_async( + transport: str = "grpc_asyncio", request_type=clouddms.ListPrivateConnectionsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_private_connections_async_from_dict(): + await test_list_private_connections_async(request_type=dict) + + +def test_list_private_connections_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = clouddms.ListPrivateConnectionsResponse() + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_private_connections_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse() + ) + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_private_connections_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_private_connections_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_private_connections_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_private_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) for i in results) + + +def test_list_private_connections_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, clouddms_resources.PrivateConnection) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_private_connections(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DeletePrivateConnectionRequest, + dict, + ], +) +def test_delete_private_connection(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + client.delete_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_delete_private_connection_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DeletePrivateConnectionRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_private_connection_async_from_dict(): + await test_delete_private_connection_async(request_type=dict) + + +def test_delete_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.GetConversionWorkspaceRequest, + dict, + ], +) +def test_get_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace( + name="name_value", + has_uncommitted_changes=True, + latest_commit_id="latest_commit_id_value", + display_name="display_name_value", + ) + response = client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == "name_value" + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == "latest_commit_id_value" + assert response.display_name == "display_name_value" + + +def test_get_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + client.get_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async( + transport: str = "grpc_asyncio", request_type=clouddms.GetConversionWorkspaceRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversionworkspace_resources.ConversionWorkspace( + name="name_value", + has_uncommitted_changes=True, + latest_commit_id="latest_commit_id_value", + display_name="display_name_value", + ) + ) + response = await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == "name_value" + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == "latest_commit_id_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async_from_dict(): + await test_get_conversion_workspace_async(request_type=dict) + + +def test_get_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + call.return_value = conversionworkspace_resources.ConversionWorkspace() + client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversionworkspace_resources.ConversionWorkspace() + ) + await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_conversion_workspace(
+            clouddms.GetConversionWorkspaceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_conversion_workspace_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        # NOTE: removed a dead sync-style return value; the async fake below is used.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            conversionworkspace_resources.ConversionWorkspace()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_conversion_workspace(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_conversion_workspace_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ListConversionWorkspacesRequest, + dict, + ], +) +def test_list_conversion_workspaces(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_conversion_workspaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + client.list_conversion_workspaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ListConversionWorkspacesRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListConversionWorkspacesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConversionWorkspacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_from_dict(): + await test_list_conversion_workspaces_async(request_type=dict) + + +def test_list_conversion_workspaces_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + call.return_value = clouddms.ListConversionWorkspacesResponse() + client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListConversionWorkspacesResponse() + ) + await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_conversion_workspaces_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_conversion_workspaces( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_conversion_workspaces_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_conversion_workspaces(
+            clouddms.ListConversionWorkspacesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        # NOTE: removed a dead sync-style return value; the async fake below is used.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            clouddms.ListConversionWorkspacesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_conversion_workspaces(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_conversion_workspaces(
+            clouddms.ListConversionWorkspacesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_conversion_workspaces_pager(transport_name: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_conversion_workspaces(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, conversionworkspace_resources.ConversionWorkspace)
+            for i in results
+        )
+
+
+def test_list_conversion_workspaces_pages(transport_name: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_conversion_workspaces(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_async_pager():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_conversion_workspaces(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, conversionworkspace_resources.ConversionWorkspace)
+            for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_async_pages():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token="abc", + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[], + next_page_token="def", + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + ], + next_page_token="ghi", + ), + clouddms.ListConversionWorkspacesResponse( + conversion_workspaces=[ + conversionworkspace_resources.ConversionWorkspace(), + conversionworkspace_resources.ConversionWorkspace(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_conversion_workspaces(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.CreateConversionWorkspaceRequest, + dict, + ], +) +def test_create_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + client.create_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CreateConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async_from_dict(): + await test_create_conversion_workspace_async(request_type=dict) + + +def test_create_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_conversion_workspace( + parent="parent_value", + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + conversion_workspace_id="conversion_workspace_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].conversion_workspace
+        mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value")
+        assert arg == mock_val
+        arg = args[0].conversion_workspace_id
+        mock_val = "conversion_workspace_id_value"
+        assert arg == mock_val
+
+
+def test_create_conversion_workspace_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_conversion_workspace(
+            clouddms.CreateConversionWorkspaceRequest(),
+            parent="parent_value",
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            conversion_workspace_id="conversion_workspace_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_conversion_workspace_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        # NOTE: removed a dead sync-style return value; the async fake below is used.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.create_conversion_workspace( + parent="parent_value", + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + conversion_workspace_id="conversion_workspace_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value") + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = "conversion_workspace_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent="parent_value", + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + conversion_workspace_id="conversion_workspace_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.UpdateConversionWorkspaceRequest, + dict, + ], +) +def test_update_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + client.update_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.UpdateConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async_from_dict(): + await test_update_conversion_workspace_async(request_type=dict) + + +def test_update_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace.name=name_value", + ) in kw["metadata"] + + +def test_update_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_conversion_workspace( + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DeleteConversionWorkspaceRequest, + dict, + ], +) +def test_delete_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + client.delete_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DeleteConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async_from_dict(): + await test_delete_conversion_workspace_async(request_type=dict) + + +def test_delete_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_conversion_workspace( + clouddms.DeleteConversionWorkspaceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.SeedConversionWorkspaceRequest, + dict, + ], +) +def test_seed_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_seed_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + client.seed_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.SeedConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async_from_dict(): + await test_seed_conversion_workspace_async(request_type=dict) + + +def test_seed_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.SeedConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ImportMappingRulesRequest, + dict, + ], +) +def test_import_mapping_rules(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_mapping_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + client.import_mapping_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async( + transport: str = "grpc_asyncio", request_type=clouddms.ImportMappingRulesRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async_from_dict(): + await test_import_mapping_rules_async(request_type=dict) + + +def test_import_mapping_rules_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_mapping_rules_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ConvertConversionWorkspaceRequest, + dict, + ], +) +def test_convert_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_convert_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + client.convert_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ConvertConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async_from_dict(): + await test_convert_conversion_workspace_async(request_type=dict) + + +def test_convert_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.CommitConversionWorkspaceRequest, + dict, + ], +) +def test_commit_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_commit_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + client.commit_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CommitConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async_from_dict(): + await test_commit_conversion_workspace_async(request_type=dict) + + +def test_commit_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CommitConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CommitConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.RollbackConversionWorkspaceRequest, + dict, + ], +) +def test_rollback_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_rollback_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + client.rollback_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.RollbackConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async_from_dict(): + await test_rollback_conversion_workspace_async(request_type=dict) + + +def test_rollback_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ApplyConversionWorkspaceRequest, + dict, + ], +) +def test_apply_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + client.apply_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ApplyConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async_from_dict(): + await test_apply_conversion_workspace_async(request_type=dict) + + +def test_apply_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DescribeDatabaseEntitiesRequest, + dict, + ], +) +def test_describe_database_entities(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeDatabaseEntitiesResponse( + next_page_token="next_page_token_value", + ) + response = client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_describe_database_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + client.describe_database_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + +@pytest.mark.asyncio +async def test_describe_database_entities_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DescribeDatabaseEntitiesRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeDatabaseEntitiesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_from_dict(): + await test_describe_database_entities_async(request_type=dict) + + +def test_describe_database_entities_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + call.return_value = clouddms.DescribeDatabaseEntitiesResponse() + client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_describe_database_entities_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeDatabaseEntitiesResponse() + ) + await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +def test_describe_database_entities_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("conversion_workspace", ""),)), + ) + pager = client.describe_database_entities(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, conversionworkspace_resources.DatabaseEntity) for i in results + ) + + +def test_describe_database_entities_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = list(client.describe_database_entities(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + async_pager = await client.describe_database_entities( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.describe_database_entities(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.SearchBackgroundJobsRequest, + dict, + ], +) +def test_search_background_jobs(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.SearchBackgroundJobsResponse() + response = client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +def test_search_background_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + client.search_background_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + +@pytest.mark.asyncio +async def test_search_background_jobs_async( + transport: str = "grpc_asyncio", request_type=clouddms.SearchBackgroundJobsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.SearchBackgroundJobsResponse() + ) + response = await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +@pytest.mark.asyncio +async def test_search_background_jobs_async_from_dict(): + await test_search_background_jobs_async(request_type=dict) + + +def test_search_background_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + call.return_value = clouddms.SearchBackgroundJobsResponse() + client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_background_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.SearchBackgroundJobsResponse() + ) + await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DescribeConversionWorkspaceRevisionsRequest, + dict, + ], +) +def test_describe_conversion_workspace_revisions(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + response = client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +def test_describe_conversion_workspace_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + client.describe_conversion_workspace_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DescribeConversionWorkspaceRevisionsRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeConversionWorkspaceRevisionsResponse() + ) + response = await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async_from_dict(): + await test_describe_conversion_workspace_revisions_async(request_type=dict) + + +def test_describe_conversion_workspace_revisions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeConversionWorkspaceRevisionsResponse() + ) + await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.FetchStaticIpsRequest, + dict, + ], +) +def test_fetch_static_ips(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + response = client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_static_ips_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + client.fetch_static_ips() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async( + transport: str = "grpc_asyncio", request_type=clouddms.FetchStaticIpsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchStaticIpsAsyncPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_from_dict(): + await test_fetch_static_ips_async(request_type=dict) + + +def test_fetch_static_ips_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + call.return_value = clouddms.FetchStaticIpsResponse() + client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_static_ips_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.FetchStaticIpsResponse() + ) + await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_fetch_static_ips_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_static_ips( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_fetch_static_ips_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.FetchStaticIpsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.FetchStaticIpsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_static_ips( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_static_ips_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_static_ips( + clouddms.FetchStaticIpsRequest(), + name="name_value", + ) + + +def test_fetch_static_ips_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), + ) + pager = client.fetch_static_ips(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_fetch_static_ips_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_static_ips(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_static_ips), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_static_ips( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.fetch_static_ips(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataMigrationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DataMigrationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataMigrationServiceGrpcTransport, + ) + + +def test_data_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_migration_jobs", + "get_migration_job", + "create_migration_job", + "update_migration_job", + "delete_migration_job", + "start_migration_job", + "stop_migration_job", + "resume_migration_job", + "promote_migration_job", + "verify_migration_job", + "restart_migration_job", + "generate_ssh_script", + "list_connection_profiles", + "get_connection_profile", + "create_connection_profile", + "update_connection_profile", + "delete_connection_profile", + "create_private_connection", + "get_private_connection", + "list_private_connections", + "delete_private_connection", + "get_conversion_workspace", + "list_conversion_workspaces", + "create_conversion_workspace", + "update_conversion_workspace", + "delete_conversion_workspace", + "seed_conversion_workspace", + "import_mapping_rules", + "convert_conversion_workspace", + "commit_conversion_workspace", + "rollback_conversion_workspace", + "apply_conversion_workspace", + "describe_database_entities", + "search_background_jobs", + "describe_conversion_workspace_revisions", + "fetch_static_ips", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_migration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", 
autospec=True + ) as load_creds, mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport() + adc.assert_called_once() + + +def test_data_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataMigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataMigrationServiceGrpcTransport, grpc_helpers), + (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_data_migration_service_host_no_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datamigration.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("datamigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_data_migration_service_host_with_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datamigration.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("datamigration.googleapis.com:8000") + + +def test_data_migration_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataMigrationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_migration_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments 
(api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_migration_service_grpc_lro_client(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_data_migration_service_grpc_lro_async_client(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_connection_profile_path(): + project = "squid" + location = "clam" + connection_profile = "whelk" + expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format( + project=project, + location=location, + connection_profile=connection_profile, + ) + actual = DataMigrationServiceClient.connection_profile_path( + project, location, connection_profile + ) + assert expected == actual + + +def test_parse_connection_profile_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection_profile": "nudibranch", + } + path = DataMigrationServiceClient.connection_profile_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_connection_profile_path(path) + assert expected == actual + + +def test_conversion_workspace_path(): + project = "cuttlefish" + location = "mussel" + conversion_workspace = "winkle" + expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format( + project=project, + location=location, + conversion_workspace=conversion_workspace, + ) + actual = DataMigrationServiceClient.conversion_workspace_path( + project, location, conversion_workspace + ) + assert expected == actual + + +def test_parse_conversion_workspace_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "conversion_workspace": "abalone", + } + path = DataMigrationServiceClient.conversion_workspace_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_conversion_workspace_path(path) + assert expected == actual + + +def test_migration_job_path(): + project = "squid" + location = "clam" + migration_job = "whelk" + expected = ( + "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format( + project=project, + location=location, + migration_job=migration_job, + ) + ) + actual = DataMigrationServiceClient.migration_job_path( + project, location, migration_job + ) + assert expected == actual + + +def test_parse_migration_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "migration_job": "nudibranch", + } + path = DataMigrationServiceClient.migration_job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_migration_job_path(path) + assert expected == actual + + +def test_networks_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = DataMigrationServiceClient.networks_path(project, network) + assert expected == actual + + +def test_parse_networks_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = DataMigrationServiceClient.networks_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_networks_path(path) + assert expected == actual + + +def test_private_connection_path(): + project = "scallop" + location = "abalone" + private_connection = "squid" + expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format( + project=project, + location=location, + private_connection=private_connection, + ) + actual = DataMigrationServiceClient.private_connection_path( + project, location, private_connection + ) + assert expected == actual + + +def test_parse_private_connection_path(): + expected = { + "project": "clam", + "location": "whelk", + "private_connection": "octopus", + } + path = DataMigrationServiceClient.private_connection_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_private_connection_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataMigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DataMigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataMigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DataMigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataMigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DataMigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = DataMigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DataMigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataMigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DataMigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataMigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataMigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataMigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        # The value is wrapped so the async client can await it.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+
+        response = await client.get_iam_policy(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.get_iam_policy(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] -def test_data_migration_service_grpc_lro_client(): +def test_get_iam_policy_from_dict(): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() -def test_data_migration_service_grpc_lro_async_client(): +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() -def test_connection_profile_path(): - project = "squid" - location = "clam" - connection_profile = "whelk" - expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format( - project=project, - location=location, - connection_profile=connection_profile, - ) - actual = DataMigrationServiceClient.connection_profile_path( - project, location, connection_profile +def test_test_iam_permissions(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert expected == actual - -def test_parse_connection_profile_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection_profile": "nudibranch", - } - path = DataMigrationServiceClient.connection_profile_path(**expected) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_connection_profile_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) -def test_migration_job_path(): - project = "cuttlefish" - location = "mussel" - migration_job = "winkle" - expected = ( - "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format( - project=project, - location=location, - migration_job=migration_job, - ) - ) - actual = DataMigrationServiceClient.migration_job_path( - project, location, migration_job - ) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_migration_job_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "migration_job": "abalone", - } - path = DataMigrationServiceClient.migration_job_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_migration_job_path(path) - assert expected == actual + assert response.permissions == ["permissions_value"] -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = DataMigrationServiceClient.common_billing_account_path(billing_account) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DataMigrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_billing_account_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = DataMigrationServiceClient.common_folder_path(folder) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DataMigrationServiceClient.common_folder_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_common_folder_path(path) - assert expected == actual + assert response.permissions == ["permissions_value"] -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, +def test_test_iam_permissions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DataMigrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DataMigrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_organization_path(path) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = DataMigrationServiceClient.common_project_path(project) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DataMigrationServiceClient.common_project_path(**expected) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_common_project_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DataMigrationServiceClient.common_location_path(project, location) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DataMigrationServiceClient.common_location_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_location_path(path) - assert expected == actual + await client.test_iam_permissions(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] - with mock.patch.object( - transports.DataMigrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +def test_test_iam_permissions_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - transports.DataMigrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = DataMigrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } ) - prep.assert_called_once_with(client_info) + call.assert_called() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_test_iam_permissions_from_dict_async(): client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() def test_transport_close():