From c56c9c13f79ca70c319fd1e0bdfcf4f5ec67df36 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:25:43 -0700 Subject: [PATCH 01/13] changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. --- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 8bc01583..16534418 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,7 +103,6 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -140,6 +139,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", + "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "DataCatalogClient", ) diff --git a/synth.metadata b/synth.metadata index 6fe87e5d..7e914094 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "8c157542045cfdebe31876cfc2448590b48775c2" + "sha": "6772851d12a6432eaa662db468df233dbd3195f3" } }, { From 41127d5cb2932e95c5ccf9309bee15478e1121d1 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:28:56 -0700 Subject: [PATCH 02/13] feat: introduce google.type.Decimal Committer: @alexander-fenster PiperOrigin-RevId: 364894175 Source-Author: Google APIs Source-Date: Wed Mar 24 14:41:06 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 15c5e21948ff6fbe41f91bdf04f6252f91a12d59 Source-Link: 
https://github.com/googleapis/googleapis/commit/15c5e21948ff6fbe41f91bdf04f6252f91a12d59 --- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 16534418..8bc01583 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,6 +103,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", + "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -139,7 +140,6 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerClient", ) From 3859c5ed5f0dc0b5ae5468a39fa6bdd88600a29a Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:31:50 -0700 Subject: [PATCH 03/13] feat: Policy Tag Manager v1 API service feat: new RenameTagTemplateFieldEnumValue API feat: adding fully_qualified_name in lookup and search feat: added DATAPROC_METASTORE integrated system along with new entry types: DATABASE and SERVICE docs: Documentation improvements PiperOrigin-RevId: 365620142 Source-Author: Google APIs Source-Date: Mon Mar 29 11:20:23 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: a1ab4d44db02d59ff58810c6d4182d84e4b9abaa Source-Link: https://github.com/googleapis/googleapis/commit/a1ab4d44db02d59ff58810c6d4182d84e4b9abaa --- docs/datacatalog_v1/policy_tag_manager.rst | 11 + .../policy_tag_manager_serialization.rst | 6 + docs/datacatalog_v1/services.rst | 2 + google/cloud/datacatalog_v1/__init__.py | 58 +- .../cloud/datacatalog_v1/proto/common.proto | 5 +- .../datacatalog_v1/proto/data_source.proto | 50 + 
.../datacatalog_v1/proto/datacatalog.proto | 426 +- .../proto/gcs_fileset_spec.proto | 2 +- .../proto/policytagmanager.proto | 452 ++ .../proto/policytagmanagerserialization.proto | 186 + .../cloud/datacatalog_v1/proto/schema.proto | 24 +- .../cloud/datacatalog_v1/proto/search.proto | 16 +- .../datacatalog_v1/proto/table_spec.proto | 5 +- google/cloud/datacatalog_v1/proto/tags.proto | 67 +- .../datacatalog_v1/proto/timestamps.proto | 2 +- .../services/data_catalog/async_client.py | 243 +- .../services/data_catalog/client.py | 266 +- .../services/data_catalog/transports/base.py | 14 + .../services/data_catalog/transports/grpc.py | 38 +- .../data_catalog/transports/grpc_asyncio.py | 39 +- .../services/policy_tag_manager/__init__.py | 24 + .../policy_tag_manager/async_client.py | 1302 +++++ .../services/policy_tag_manager/client.py | 1502 ++++++ .../services/policy_tag_manager/pagers.py | 285 ++ .../policy_tag_manager/transports/__init__.py | 35 + .../policy_tag_manager/transports/base.py | 294 ++ .../policy_tag_manager/transports/grpc.py | 597 +++ .../transports/grpc_asyncio.py | 610 +++ .../__init__.py | 24 + .../async_client.py | 313 ++ .../client.py | 490 ++ .../transports/__init__.py | 37 + .../transports/base.py | 142 + .../transports/grpc.py | 303 ++ .../transports/grpc_asyncio.py | 307 ++ google/cloud/datacatalog_v1/types/__init__.py | 54 + google/cloud/datacatalog_v1/types/common.py | 1 + .../cloud/datacatalog_v1/types/data_source.py | 49 + .../cloud/datacatalog_v1/types/datacatalog.py | 328 +- .../datacatalog_v1/types/policytagmanager.py | 396 ++ .../types/policytagmanagerserialization.py | 210 + google/cloud/datacatalog_v1/types/schema.py | 21 +- google/cloud/datacatalog_v1/types/search.py | 17 + .../cloud/datacatalog_v1/types/table_spec.py | 1 + google/cloud/datacatalog_v1/types/tags.py | 87 +- scripts/fixup_datacatalog_v1_keywords.py | 15 +- synth.metadata | 29 +- .../gapic/datacatalog_v1/test_data_catalog.py | 384 +- 
.../datacatalog_v1/test_policy_tag_manager.py | 4195 +++++++++++++++++ .../test_policy_tag_manager_serialization.py | 1297 +++++ 50 files changed, 14777 insertions(+), 484 deletions(-) create mode 100644 docs/datacatalog_v1/policy_tag_manager.rst create mode 100644 docs/datacatalog_v1/policy_tag_manager_serialization.rst create mode 100644 google/cloud/datacatalog_v1/proto/data_source.proto create mode 100644 google/cloud/datacatalog_v1/proto/policytagmanager.proto create mode 100644 google/cloud/datacatalog_v1/proto/policytagmanagerserialization.proto create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py create mode 100644 
google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py create mode 100644 google/cloud/datacatalog_v1/types/data_source.py create mode 100644 google/cloud/datacatalog_v1/types/policytagmanager.py create mode 100644 google/cloud/datacatalog_v1/types/policytagmanagerserialization.py create mode 100644 tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py create mode 100644 tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py diff --git a/docs/datacatalog_v1/policy_tag_manager.rst b/docs/datacatalog_v1/policy_tag_manager.rst new file mode 100644 index 00000000..cc36cf29 --- /dev/null +++ b/docs/datacatalog_v1/policy_tag_manager.rst @@ -0,0 +1,11 @@ +PolicyTagManager +---------------------------------- + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager + :members: + :inherited-members: + + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager.pagers + :members: + :inherited-members: diff --git a/docs/datacatalog_v1/policy_tag_manager_serialization.rst b/docs/datacatalog_v1/policy_tag_manager_serialization.rst new file mode 100644 index 00000000..f7006d1b --- /dev/null +++ b/docs/datacatalog_v1/policy_tag_manager_serialization.rst @@ -0,0 +1,6 @@ +PolicyTagManagerSerialization +----------------------------------------------- + +.. 
automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager_serialization + :members: + :inherited-members: diff --git a/docs/datacatalog_v1/services.rst b/docs/datacatalog_v1/services.rst index fd21338e..a70d3132 100644 --- a/docs/datacatalog_v1/services.rst +++ b/docs/datacatalog_v1/services.rst @@ -4,3 +4,5 @@ Services for Google Cloud Datacatalog v1 API :maxdepth: 2 data_catalog + policy_tag_manager + policy_tag_manager_serialization diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 734df087..7f2e8be3 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -16,12 +16,18 @@ # from .services.data_catalog import DataCatalogClient +from .services.policy_tag_manager import PolicyTagManagerClient +from .services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationClient, +) from .types.common import IntegratedSystem +from .types.data_source import DataSource from .types.datacatalog import CreateEntryGroupRequest from .types.datacatalog import CreateEntryRequest from .types.datacatalog import CreateTagRequest from .types.datacatalog import CreateTagTemplateFieldRequest from .types.datacatalog import CreateTagTemplateRequest +from .types.datacatalog import DatabaseTableSpec from .types.datacatalog import DeleteEntryGroupRequest from .types.datacatalog import DeleteEntryRequest from .types.datacatalog import DeleteTagRequest @@ -40,6 +46,7 @@ from .types.datacatalog import ListTagsRequest from .types.datacatalog import ListTagsResponse from .types.datacatalog import LookupEntryRequest +from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest from .types.datacatalog import RenameTagTemplateFieldRequest from .types.datacatalog import SearchCatalogRequest from .types.datacatalog import SearchCatalogResponse @@ -50,6 +57,28 @@ from .types.datacatalog import UpdateTagTemplateRequest from .types.gcs_fileset_spec import GcsFileSpec from 
.types.gcs_fileset_spec import GcsFilesetSpec +from .types.policytagmanager import CreatePolicyTagRequest +from .types.policytagmanager import CreateTaxonomyRequest +from .types.policytagmanager import DeletePolicyTagRequest +from .types.policytagmanager import DeleteTaxonomyRequest +from .types.policytagmanager import GetPolicyTagRequest +from .types.policytagmanager import GetTaxonomyRequest +from .types.policytagmanager import ListPolicyTagsRequest +from .types.policytagmanager import ListPolicyTagsResponse +from .types.policytagmanager import ListTaxonomiesRequest +from .types.policytagmanager import ListTaxonomiesResponse +from .types.policytagmanager import PolicyTag +from .types.policytagmanager import Taxonomy +from .types.policytagmanager import UpdatePolicyTagRequest +from .types.policytagmanager import UpdateTaxonomyRequest +from .types.policytagmanagerserialization import CrossRegionalSource +from .types.policytagmanagerserialization import ExportTaxonomiesRequest +from .types.policytagmanagerserialization import ExportTaxonomiesResponse +from .types.policytagmanagerserialization import ImportTaxonomiesRequest +from .types.policytagmanagerserialization import ImportTaxonomiesResponse +from .types.policytagmanagerserialization import InlineSource +from .types.policytagmanagerserialization import SerializedPolicyTag +from .types.policytagmanagerserialization import SerializedTaxonomy from .types.schema import ColumnSchema from .types.schema import Schema from .types.search import SearchCatalogResult @@ -73,37 +102,61 @@ "ColumnSchema", "CreateEntryGroupRequest", "CreateEntryRequest", + "CreatePolicyTagRequest", "CreateTagRequest", "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", + "CreateTaxonomyRequest", + "CrossRegionalSource", + "DataCatalogClient", + "DataSource", + "DatabaseTableSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", + "DeletePolicyTagRequest", "DeleteTagRequest", "DeleteTagTemplateFieldRequest", 
"DeleteTagTemplateRequest", + "DeleteTaxonomyRequest", "Entry", "EntryGroup", "EntryType", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", "FieldType", "GcsFileSpec", "GcsFilesetSpec", "GetEntryGroupRequest", "GetEntryRequest", + "GetPolicyTagRequest", "GetTagTemplateRequest", + "GetTaxonomyRequest", + "ImportTaxonomiesRequest", + "ImportTaxonomiesResponse", + "InlineSource", "IntegratedSystem", "ListEntriesRequest", "ListEntriesResponse", "ListEntryGroupsRequest", "ListEntryGroupsResponse", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", "ListTagsRequest", "ListTagsResponse", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", "LookupEntryRequest", + "PolicyTag", + "PolicyTagManagerClient", + "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", "SearchCatalogResponse", "SearchCatalogResult", "SearchResultType", + "SerializedPolicyTag", + "SerializedTaxonomy", "SystemTimestamps", "TableSourceType", "TableSpec", @@ -111,11 +164,14 @@ "TagField", "TagTemplate", "TagTemplateField", + "Taxonomy", "UpdateEntryGroupRequest", "UpdateEntryRequest", + "UpdatePolicyTagRequest", "UpdateTagRequest", "UpdateTagTemplateFieldRequest", "UpdateTagTemplateRequest", + "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerSerializationClient", ) diff --git a/google/cloud/datacatalog_v1/proto/common.proto b/google/cloud/datacatalog_v1/proto/common.proto index bb31bceb..f8d4817c 100644 --- a/google/cloud/datacatalog_v1/proto/common.proto +++ b/google/cloud/datacatalog_v1/proto/common.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -35,4 +35,7 @@ enum IntegratedSystem { // Cloud Pub/Sub. CLOUD_PUBSUB = 2; + + // Dataproc Metastore. 
+ DATAPROC_METASTORE = 3; } diff --git a/google/cloud/datacatalog_v1/proto/data_source.proto b/google/cloud/datacatalog_v1/proto/data_source.proto new file mode 100644 index 00000000..a4696a01 --- /dev/null +++ b/google/cloud/datacatalog_v1/proto/data_source.proto @@ -0,0 +1,50 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.datacatalog.v1; + +import "google/api/field_behavior.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.DataCatalog.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1;datacatalog"; +option java_multiple_files = true; +option java_outer_classname = "DataSourceProto"; +option java_package = "com.google.cloud.datacatalog.v1"; +option php_namespace = "Google\\Cloud\\DataCatalog\\V1"; +option ruby_package = "Google::Cloud::DataCatalog::V1"; + +// Describes the physical location of an entry. +message DataSource { + // Service name where the data is stored. + enum Service { + // Default unknown service. + SERVICE_UNSPECIFIED = 0; + + // Google Cloud Storage service. + CLOUD_STORAGE = 1; + + // BigQuery service. + BIGQUERY = 2; + } + + // Service in which the data is physically stored. + Service service = 1; + + // Full name of the resource as defined by the service, e.g. 
+ // //bigquery.googleapis.com/projects/{project_id}/locations/{location}/datasets/{dataset_id}/tables/{table_id} + string resource = 2; +} diff --git a/google/cloud/datacatalog_v1/proto/datacatalog.proto b/google/cloud/datacatalog_v1/proto/datacatalog.proto index c5b700dd..3f356587 100644 --- a/google/cloud/datacatalog_v1/proto/datacatalog.proto +++ b/google/cloud/datacatalog_v1/proto/datacatalog.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import "google/api/client.proto"; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/datacatalog/v1/common.proto"; +import "google/cloud/datacatalog/v1/data_source.proto"; import "google/cloud/datacatalog/v1/gcs_fileset_spec.proto"; import "google/cloud/datacatalog/v1/schema.proto"; import "google/cloud/datacatalog/v1/search.proto"; @@ -31,6 +32,7 @@ import "google/iam/v1/iam_policy.proto"; import "google/iam/v1/policy.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.DataCatalog.V1"; @@ -39,13 +41,16 @@ option java_multiple_files = true; option java_package = "com.google.cloud.datacatalog.v1"; option php_namespace = "Google\\Cloud\\DataCatalog\\V1"; option ruby_package = "Google::Cloud::DataCatalog::V1"; +option (google.api.resource_definition) = { + type: "datacatalog.googleapis.com/TagTemplateFieldEnumValue" + pattern: "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}" +}; // Data Catalog API service allows clients to discover, understand, and manage // their data. 
service DataCatalog { option (google.api.default_host) = "datacatalog.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Searches Data Catalog for multiple resources like entries, tags that // match a query. @@ -53,7 +58,7 @@ service DataCatalog { // This is a custom method // (https://cloud.google.com/apis/design/custom_methods) and does not return // the complete resource, only the resource identifier and high level - // fields. Clients can subsequentally call `Get` methods. + // fields. Clients can subsequently call `Get` methods. // // Note that Data Catalog search queries do not guarantee full recall. Query // results that match your query may not be returned, even in subsequent @@ -126,8 +131,7 @@ service DataCatalog { // identified by the `name` parameter (see [Data Catalog Resource Project] // (https://cloud.google.com/data-catalog/docs/concepts/resource-project) for // more information). - rpc DeleteEntryGroup(DeleteEntryGroupRequest) - returns (google.protobuf.Empty) { + rpc DeleteEntryGroup(DeleteEntryGroupRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/entryGroups/*}" }; @@ -135,16 +139,15 @@ service DataCatalog { } // Lists entry groups. - rpc ListEntryGroups(ListEntryGroupsRequest) - returns (ListEntryGroupsResponse) { + rpc ListEntryGroups(ListEntryGroupsRequest) returns (ListEntryGroupsResponse) { option (google.api.http) = { get: "/v1/{parent=projects/*/locations/*}/entryGroups" }; option (google.api.method_signature) = "parent"; } - // Creates an entry. Only entries of 'FILESET' type or user-specified type can - // be created. + // Creates an entry. Only entries of types 'FILESET', 'CLUSTER', 'DATA_STREAM' + // or with a user-specified type can be created. 
// // Users should enable the Data Catalog API in the project identified by // the `parent` parameter (see [Data Catalog Resource Project] @@ -223,8 +226,7 @@ service DataCatalog { post: "/v1/{parent=projects/*/locations/*}/tagTemplates" body: "tag_template" }; - option (google.api.method_signature) = - "parent,tag_template_id,tag_template"; + option (google.api.method_signature) = "parent,tag_template_id,tag_template"; } // Gets a tag template. @@ -256,8 +258,7 @@ service DataCatalog { // the `name` parameter (see [Data Catalog Resource Project] // (https://cloud.google.com/data-catalog/docs/concepts/resource-project) for // more information). - rpc DeleteTagTemplate(DeleteTagTemplateRequest) - returns (google.protobuf.Empty) { + rpc DeleteTagTemplate(DeleteTagTemplateRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/tagTemplates/*}" }; @@ -269,14 +270,12 @@ service DataCatalog { // [Data Catalog Resource // Project](https://cloud.google.com/data-catalog/docs/concepts/resource-project) // for more information). - rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) - returns (TagTemplateField) { + rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) returns (TagTemplateField) { option (google.api.http) = { post: "/v1/{parent=projects/*/locations/*/tagTemplates/*}/fields" body: "tag_template_field" }; - option (google.api.method_signature) = - "parent,tag_template_field_id,tag_template_field"; + option (google.api.method_signature) = "parent,tag_template_field_id,tag_template_field"; } // Updates a field in a tag template. This method cannot be used to update the @@ -284,15 +283,13 @@ service DataCatalog { // identified by the `name` parameter (see [Data Catalog Resource Project] // (https://cloud.google.com/data-catalog/docs/concepts/resource-project) for // more information). 
- rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) - returns (TagTemplateField) { + rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) returns (TagTemplateField) { option (google.api.http) = { patch: "/v1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" body: "tag_template_field" }; option (google.api.method_signature) = "name,tag_template_field"; - option (google.api.method_signature) = - "name,tag_template_field,update_mask"; + option (google.api.method_signature) = "name,tag_template_field,update_mask"; } // Renames a field in a tag template. The user should enable the Data Catalog @@ -300,8 +297,7 @@ service DataCatalog { // Resource // Project](https://cloud.google.com/data-catalog/docs/concepts/resource-project) // for more information). - rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) - returns (TagTemplateField) { + rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) returns (TagTemplateField) { option (google.api.http) = { post: "/v1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename" body: "*" @@ -309,13 +305,22 @@ service DataCatalog { option (google.api.method_signature) = "name,new_tag_template_field_id"; } + // Renames an enum value in a tag template. The enum values have to be unique + // within one enum field. + rpc RenameTagTemplateFieldEnumValue(RenameTagTemplateFieldEnumValueRequest) returns (TagTemplateField) { + option (google.api.http) = { + post: "/v1/{name=projects/*/locations/*/tagTemplates/*/fields/*/enumValues/*}:rename" + body: "*" + }; + option (google.api.method_signature) = "name,new_enum_value_display_name"; + } + // Deletes a field in a tag template and all uses of that field. // Users should enable the Data Catalog API in the project identified by // the `name` parameter (see [Data Catalog Resource Project] // (https://cloud.google.com/data-catalog/docs/concepts/resource-project) for // more information). 
- rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) - returns (google.protobuf.Empty) { + rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" }; @@ -391,8 +396,7 @@ service DataCatalog { // templates. // - `datacatalog.entries.setIamPolicy` to set policies on entries. // - `datacatalog.entryGroups.setIamPolicy` to set policies on entry groups. - rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) - returns (google.iam.v1.Policy) { + rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy" body: "*" @@ -420,8 +424,7 @@ service DataCatalog { // templates. // - `datacatalog.entries.getIamPolicy` to get policies on entries. // - `datacatalog.entryGroups.getIamPolicy` to get policies on entry groups. - rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) - returns (google.iam.v1.Policy) { + rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy" body: "*" @@ -450,8 +453,7 @@ service DataCatalog { // // A caller is not required to have Google IAM permission to make this // request. - rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) - returns (google.iam.v1.TestIamPermissionsResponse) { + rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) returns (google.iam.v1.TestIamPermissionsResponse) { option (google.api.http) = { post: "/v1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions" body: "*" @@ -490,41 +492,15 @@ message SearchCatalogRequest { // Optional. The list of locations to search within. // 1. If empty, search will be performed in all locations; - // 2. 
If any of the locations are NOT in the valid locations list, error - // will be returned; + // 2. If any of the locations are NOT [supported + // regions](https://cloud.google.com/data-catalog/docs/concepts/regions#supported_regions), + // error will be returned; // 3. Otherwise, search only the given locations for matching results. // Typical usage is to leave this field empty. When a location is // unreachable as returned in the `SearchCatalogResponse.unreachable` field, // users can repeat the search request with this parameter set to get // additional information on the error. - // - // Valid locations: - // * asia-east1 - // * asia-east2 - // * asia-northeast1 - // * asia-northeast2 - // * asia-northeast3 - // * asia-south1 - // * asia-southeast1 - // * australia-southeast1 - // * eu - // * europe-north1 - // * europe-west1 - // * europe-west2 - // * europe-west3 - // * europe-west4 - // * europe-west6 - // * global - // * northamerica-northeast1 - // * southamerica-east1 - // * us - // * us-central1 - // * us-east1 - // * us-east4 - // * us-west1 - // * us-west2 - repeated string restricted_locations = 16 - [(google.api.field_behavior) = OPTIONAL]; + repeated string restricted_locations = 16 [(google.api.field_behavior) = OPTIONAL]; } // Required. The scope of this search request. A `scope` that has empty @@ -533,8 +509,8 @@ message SearchCatalogRequest { // return an error in such a case. Scope scope = 6 [(google.api.field_behavior) = REQUIRED]; - // Required. The query string in search query syntax. The query must be - // non-empty. + // Optional. The query string in search query syntax. An empty query string will result + // in all data assets (in the specified scope) that the user has access to. // // Query strings can be simple as "x" or more qualified as: // @@ -546,15 +522,15 @@ message SearchCatalogRequest { // matching to work correctly. 
See [Data Catalog Search // Syntax](https://cloud.google.com/data-catalog/docs/how-to/search-reference) // for more information. - string query = 1 [(google.api.field_behavior) = REQUIRED]; + string query = 1 [(google.api.field_behavior) = OPTIONAL]; // Number of results in the search page. If <=0 then defaults to 10. Max limit // for page_size is 1000. Throws an invalid argument for page_size > 1000. int32 page_size = 2; // Optional. Pagination token returned in an earlier - // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1.SearchCatalogResponse.next_page_token], - // which indicates that this is a continuation of a prior + // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1.SearchCatalogResponse.next_page_token], which + // indicates that this is a continuation of a prior // [SearchCatalogRequest][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog] // call, and that the system should return the next page of data. If empty, // the first page is returned. @@ -583,19 +559,19 @@ message SearchCatalogResponse { // Unreachable locations. Search result does not include data from those // locations. Users can get additional information on the error by repeating // the search request with a more restrictive parameter -- setting the value - // for `SearchDataCatalogRequest.scope.include_locations`. + // for `SearchDataCatalogRequest.scope.restricted_locations`. repeated string unreachable = 6; } // Request message for // [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. message CreateEntryGroupRequest { - // Required. The name of the project this entry group is in. Example: + // Required. The name of the project this entry group belongs to. Example: // - // * projects/{project_id}/locations/{location} + // `projects/{project_id}/locations/{location}` // - // Note that this EntryGroup and its child resources may not actually be - // stored in the location in this name. 
+ // Note: The entry group itself and its child resources might not be + // stored in the location specified in its name. string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -603,9 +579,11 @@ message CreateEntryGroupRequest { } ]; - // Required. The id of the entry group to create. - // The id must begin with a letter or underscore, contain only English - // letters, numbers and underscores, and be at most 64 characters. + // Required. The ID of the entry group to create. + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and must start with a letter or underscore. + // The maximum size is 64 bytes when encoded in UTF-8. string entry_group_id = 3 [(google.api.field_behavior) = REQUIRED]; // The entry group to create. Defaults to an empty entry group. @@ -618,8 +596,11 @@ message UpdateEntryGroupRequest { // Required. The updated entry group. "name" field must be set. EntryGroup entry_group = 1 [(google.api.field_behavior) = REQUIRED]; - // The fields to update on the entry group. If absent or empty, all modifiable - // fields are updated. + // Names of fields whose values to overwrite on an entry group. + // + // If this parameter is absent or empty, all modifiable fields + // are overwritten. If such fields are non-required and omitted in the + // request body, their values are emptied. google.protobuf.FieldMask update_mask = 2; } @@ -658,8 +639,8 @@ message DeleteEntryGroupRequest { // Request message for // [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. message ListEntryGroupsRequest { - // Required. The name of the location that contains the entry groups, which - // can be provided in URL format. Example: + // Required. The name of the location that contains the entry groups, which can be + // provided in URL format. 
Example: // // * projects/{project_id}/locations/{location} string parent = 1 [ @@ -669,12 +650,12 @@ message ListEntryGroupsRequest { } ]; - // Optional. The maximum number of items to return. Default is 10. Max limit - // is 1000. Throws an invalid argument for `page_size > 1000`. + // Optional. The maximum number of items to return. Default is 10. Max limit is 1000. + // Throws an invalid argument for `page_size > 1000`. int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Token that specifies which page is requested. If empty, the first - // page is returned. + // Optional. Token that specifies which page is requested. If empty, the first page is + // returned. string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } @@ -692,12 +673,12 @@ message ListEntryGroupsResponse { // Request message for // [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. message CreateEntryRequest { - // Required. The name of the entry group this entry is in. Example: + // Required. The name of the entry group this entry belongs to. Example: // - // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}` // - // Note that this Entry and its child resources may not actually be stored in - // the location in this name. + // Note: The entry itself and its child resources might not be stored in + // the location specified in its name. string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -705,7 +686,11 @@ message CreateEntryRequest { } ]; - // Required. The id of the entry to create. + // Required. The ID of the entry to create. + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // and underscores (_). + // The maximum size is 64 bytes when encoded in UTF-8. string entry_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The entry to create. 
@@ -718,26 +703,30 @@ message UpdateEntryRequest { // Required. The updated entry. The "name" field must be set. Entry entry = 1 [(google.api.field_behavior) = REQUIRED]; - // The fields to update on the entry. If absent or empty, all modifiable - // fields are updated. + // Names of fields whose values to overwrite on an entry. + // + // If this parameter is absent or empty, all modifiable fields + // are overwritten. If such fields are non-required and omitted in the + // request body, their values are emptied. // // The following fields are modifiable: + // // * For entries with type `DATA_STREAM`: // * `schema` - // * For entries with type `FILESET` + // * For entries with type `FILESET`: // * `schema` // * `display_name` // * `description` // * `gcs_fileset_spec` // * `gcs_fileset_spec.file_patterns` - // * For entries with `user_specified_type` + // * For entries with `user_specified_type`: // * `schema` // * `display_name` // * `description` - // * user_specified_type - // * user_specified_system - // * linked_resource - // * source_system_timestamps + // * `user_specified_type` + // * `user_specified_system` + // * `linked_resource` + // * `source_system_timestamps` google.protobuf.FieldMask update_mask = 2; } @@ -796,9 +785,26 @@ message LookupEntryRequest { // * `bigquery.dataset.project_id.dataset_id` // * `datacatalog.entry.project_id.location_id.entry_group_id.entry_id` // - // `*_id`s shoud satisfy the standard SQL rules for identifiers. + // `*_id`s should satisfy the standard SQL rules for identifiers. // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. string sql_resource = 3; + + // Fully qualified name (FQN) of the resource. 
+ // + // FQNs take two forms: + // + // * For non-regionalized resources: + // + // `{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}` + // + // * For regionalized resources: + // + // `{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}` + // + // Example for a DPMS table: + // + // `dataproc_metastore:project_id.location_id.instance_id.database_id.table_id` + string fully_qualified_name = 5; } } @@ -818,15 +824,19 @@ message Entry { pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" }; - // The Data Catalog resource name of the entry in URL format. Example: + // Output only. The resource name of an entry in URL format. + // Example: // - // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}` // - // Note that this Entry and its child resources may not actually be stored in - // the location in this name. - string name = 1 [(google.api.resource_reference) = { - type: "datacatalog.googleapis.com/EntryGroup" - }]; + // Note: The entry itself and its child resources might not be + // stored in the location specified in its name. + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; // The resource this metadata entry refers to. // @@ -835,13 +845,39 @@ message Entry { // resource](https://cloud.google.com/apis/design/resource_names#full_resource_name). // For example, the `linked_resource` for a table resource from BigQuery is: // - // * //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + // `//bigquery.googleapis.com/projects/{projectId}/datasets/{datasetId}/tables/{tableId}` // - // Output only when Entry is of type in the EntryType enum. 
For entries with - // user_specified_type, this field is optional and defaults to an empty - // string. + // Output only when entry is one of the types in the `EntryType` enum. + // + // For entries with a `user_specified_type`, this field is optional and + // defaults to an empty string. + // + // The resource string must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), periods (.), colons (:), slashes (/), dashes (-), + // and hashes (#). + // The maximum size is 200 bytes when encoded in UTF-8. string linked_resource = 9; + // Fully qualified name (FQN) of the resource. Set automatically for entries + // representing resources from synced systems. Settable only during creation + // and read-only afterwards. Can be used for search and lookup of the entries. + // + // + // FQNs take two forms: + // + // * For non-regionalized resources: + // + // `{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}` + // + // * For regionalized resources: + // + // `{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}` + // + // Example for a DPMS table: + // + // `dataproc_metastore:project_id.location_id.instance_id.database_id.table_id` + string fully_qualified_name = 29; + // Required. Entry type. oneof entry_type { // The type of the entry. @@ -863,10 +899,9 @@ message Entry { // The source system of the entry. oneof system { - // Output only. This field indicates the entry's source system that Data - // Catalog integrates with, such as BigQuery or Pub/Sub. - IntegratedSystem integrated_system = 17 - [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. This field indicates the entry's source system that Data Catalog + // integrates with, such as BigQuery or Pub/Sub. + IntegratedSystem integrated_system = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; // This field indicates the entry's source system that Data Catalog does not // integrate with. 
`user_specified_system` strings must begin with a letter @@ -892,13 +927,33 @@ message Entry { BigQueryDateShardedSpec bigquery_date_sharded_spec = 15; } - // Display information such as title and description. A short name to identify - // the entry, for example, "Analytics Data - Jan 2011". Default value is an - // empty string. + // Type- and system- specific information. Specifications for types contain + // fields common to all entries of a given type, and sub-specs with fields + // specific to a given source system. + // When extending the API with new types and systems please use this instead + // of legacy type_spec field. + oneof spec { + // Specification that applies to a table resource. Only valid + // for entries of `TABLE` type. + DatabaseTableSpec database_table_spec = 24; + } + + // Display name of an entry. + // + // The name must contain only Unicode letters, numbers (0-9), underscores (_), + // dashes (-), spaces ( ), and can't start or end with spaces. + // The maximum size is 200 bytes when encoded in UTF-8. + // Default value is an empty string. string display_name = 3; - // Entry description, which can consist of several sentences or paragraphs - // that describe entry contents. Default value is an empty string. + // Entry description that can consist of several sentences or paragraphs + // that describe entry contents. + // + // The description must not contain Unicode non-characters as well as C0 + // and C1 control codes except tabs (HT), new lines (LF), carriage returns + // (CR), and page breaks (FF). + // The maximum size is 2000 bytes when encoded in UTF-8. + // Default value is an empty string. string description = 4; // Schema of the entry. An entry might not have any schema attached to it. @@ -909,6 +964,28 @@ message Entry { // with user_specified_type, this field is optional and defaults to an empty // timestamp. SystemTimestamps source_system_timestamps = 7; + + // Output only. Physical location of the entry. 
+ DataSource data_source = 20 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Specification that applies to a table resource. Only valid +// for entries of `TABLE` type. +message DatabaseTableSpec { + // Type of the table. + enum TableType { + // Default unknown table type. + TABLE_TYPE_UNSPECIFIED = 0; + + // Native table. + NATIVE = 1; + + // External table. + EXTERNAL = 2; + } + + // Type of this table. + TableType type = 1; } // EntryGroup Metadata. @@ -922,10 +999,10 @@ message EntryGroup { // The resource name of the entry group in URL format. Example: // - // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}` // - // Note that this EntryGroup and its child resources may not actually be - // stored in the location in this name. + // Note: The entry group itself and its child resources might not be + // stored in the location specified in its name. string name = 1; // A short name to identify the entry group, for example, @@ -937,10 +1014,8 @@ message EntryGroup { // string. string description = 3; - // Output only. Timestamps about this EntryGroup. Default value is empty - // timestamps. - SystemTimestamps data_catalog_timestamps = 4 - [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Timestamps about this EntryGroup. Default value is empty timestamps. + SystemTimestamps data_catalog_timestamps = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request message for @@ -959,7 +1034,11 @@ message CreateTagTemplateRequest { } ]; - // Required. The id of the tag template to create. + // Required. The ID of the tag template to create. + // + // The ID must contain only lowercase letters (a-z), numbers (0-9), + // or underscores (_), and must start with a letter or underscore. + // The maximum size is 64 bytes when encoded in UTF-8. string tag_template_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template to create. 
@@ -986,13 +1065,12 @@ message UpdateTagTemplateRequest { // Required. The template to update. The "name" field must be set. TagTemplate tag_template = 1 [(google.api.field_behavior) = REQUIRED]; - // The field mask specifies the parts of the template to overwrite. - // - // Allowed fields: - // - // * `display_name` + // Names of fields whose values to overwrite on a tag template. Currently, + // only `display_name` can be overwritten. // - // If absent or empty, all of the allowed fields above will be updated. + // In general, if this parameter is absent or empty, all modifiable fields + // are overwritten. If such fields are non-required and omitted in the + // request body, their values are emptied. google.protobuf.FieldMask update_mask = 2; } @@ -1018,16 +1096,18 @@ message DeleteTagTemplateRequest { // Request message for // [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. message CreateTagRequest { - // Required. The name of the resource to attach this tag to. Tags can be - // attached to Entries. Example: + // Required. The name of the resource to attach this tag to. Tags can be attached to + // entries. An entry can have up to 1000 attached tags. Example: // - // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}` // - // Note that this Tag and its child resources may not actually be stored in - // the location in this name. + // Note: The tag and its child resources might not be stored in + // the location specified in its name. string parent = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "datacatalog.googleapis.com/Tag" } + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Tag" + } ]; // Required. The tag to create. @@ -1040,8 +1120,12 @@ message UpdateTagRequest { // Required. The updated tag. The "name" field must be set. 
Tag tag = 1 [(google.api.field_behavior) = REQUIRED]; - // The fields to update on the Tag. If absent or empty, all modifiable fields - // are updated. Currently the only modifiable field is the field `fields`. + // Names of fields whose values to overwrite on a tag. Currently, a tag has + // the only modifiable field with the name `fields`. + // + // In general, if this parameter is absent or empty, all modifiable fields + // are overwritten. If such fields are non-required and omitted in the + // request body, their values are emptied. google.protobuf.FieldMask update_mask = 2; } @@ -1076,15 +1160,17 @@ message CreateTagTemplateFieldRequest { ]; // Required. The ID of the tag template field to create. - // Field ids can contain letters (both uppercase and lowercase), numbers + // + // Note: Adding a required field to an existing template is *not* allowed. + // + // Field IDs can contain letters (both uppercase and lowercase), numbers // (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 // character long and at most 128 characters long. Field IDs must also be // unique within their template. string tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template field to create. - TagTemplateField tag_template_field = 3 - [(google.api.field_behavior) = REQUIRED]; + TagTemplateField tag_template_field = 3 [(google.api.field_behavior) = REQUIRED]; } // Request message for @@ -1101,25 +1187,25 @@ message UpdateTagTemplateFieldRequest { ]; // Required. The template to update. - TagTemplateField tag_template_field = 2 - [(google.api.field_behavior) = REQUIRED]; + TagTemplateField tag_template_field = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. The field mask specifies the parts of the template to be updated. - // Allowed fields: + // Optional. Names of fields whose values to overwrite on an individual field of a tag + // template. 
The following fields are modifiable: // // * `display_name` // * `type.enum_type` // * `is_required` // - // If `update_mask` is not set or empty, all of the allowed fields above will - // be updated. + // If this parameter is absent or empty, all modifiable fields + // are overwritten. If such fields are non-required and omitted in the request + // body, their values are emptied with one exception: when updating an enum + // type, the provided values are merged with the existing values. Therefore, + // enum values can only be added, existing enum values cannot be deleted or + // renamed. // - // When updating an enum type, the provided values will be merged with the - // existing values. Therefore, enum values can only be added, existing enum - // values cannot be deleted nor renamed. Updating a template field from - // optional to required is NOT allowed. - google.protobuf.FieldMask update_mask = 3 - [(google.api.field_behavior) = OPTIONAL]; + // Additionally, updating a template field from optional to required is + // *not* allowed. + google.protobuf.FieldMask update_mask = 3 [(google.api.field_behavior) = OPTIONAL]; } // Request message for @@ -1135,11 +1221,27 @@ message RenameTagTemplateFieldRequest { } ]; - // Required. The new ID of this tag template field. For example, - // `my_new_field`. + // Required. The new ID of this tag template field. For example, `my_new_field`. string new_tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; } +// Request message for +// [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. +message RenameTagTemplateFieldEnumValueRequest { + // Required. The name of the enum field value. 
Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateFieldEnumValue" + } + ]; + + // Required. The new display name of the enum value. For example, `my_new_enum_value`. + string new_enum_value_display_name = 2 [(google.api.field_behavior) = REQUIRED]; +} + // Request message for // [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. message DeleteTagTemplateFieldRequest { @@ -1162,8 +1264,8 @@ message DeleteTagTemplateFieldRequest { // Request message for // [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. message ListTagsRequest { - // Required. The name of the Data Catalog resource to list the tags of. The - // resource could be an [Entry][google.cloud.datacatalog.v1.Entry] or an + // Required. The name of the Data Catalog resource to list the tags of. The resource + // could be an [Entry][google.cloud.datacatalog.v1.Entry] or an // [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]. // // Examples: @@ -1251,11 +1353,17 @@ enum EntryType { // https://cloud.google.com/bigquery-ml/docs/bigqueryml-intro MODEL = 5; - // Output only. An entry type which is used for streaming entries. Example: + // An entry type which is used for streaming entries. Example: // Pub/Sub topic. DATA_STREAM = 3; // An entry type which is a set of files or objects. Example: // Cloud Storage fileset. FILESET = 4; + + // A database. + DATABASE = 7; + + // A service, for example, a Dataproc Metastore service. 
+ SERVICE = 14; } diff --git a/google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto b/google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto index bcf0ead6..01436429 100644 --- a/google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto +++ b/google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/google/cloud/datacatalog_v1/proto/policytagmanager.proto b/google/cloud/datacatalog_v1/proto/policytagmanager.proto new file mode 100644 index 00000000..cc52f7d1 --- /dev/null +++ b/google/cloud/datacatalog_v1/proto/policytagmanager.proto @@ -0,0 +1,452 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.datacatalog.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/datacatalog/v1/timestamps.proto"; +import "google/iam/v1/iam_policy.proto"; +import "google/iam/v1/policy.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.DataCatalog.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1;datacatalog"; +option java_multiple_files = true; +option java_outer_classname = "PolicyTagManagerProto"; +option java_package = "com.google.cloud.datacatalog.v1"; +option php_namespace = "Google\\Cloud\\DataCatalog\\V1"; +option ruby_package = "Google::Cloud::DataCatalog::V1"; + +// Policy Tag Manager API service allows clients to manage their policy tags and +// taxonomies. +// +// Policy tags are used to tag BigQuery columns and apply additional access +// control policies. A taxonomy is a hierarchical grouping of policy tags that +// classify data along a common axis. +service PolicyTagManager { + option (google.api.default_host) = "datacatalog.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a taxonomy in a specified project. The taxonomy is initially empty, + // i.e., does not contain policy tags. + rpc CreateTaxonomy(CreateTaxonomyRequest) returns (Taxonomy) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/taxonomies" + body: "taxonomy" + }; + option (google.api.method_signature) = "parent,taxonomy"; + } + + // Deletes a taxonomy. This method will also delete all policy tags in this + // taxonomy, their associated policies, and the policy tags references from + // BigQuery columns. 
+ rpc DeleteTaxonomy(DeleteTaxonomyRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/taxonomies/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Updates a taxonomy. This method can update the taxonomy's display name, + // description, and activated policy types. + rpc UpdateTaxonomy(UpdateTaxonomyRequest) returns (Taxonomy) { + option (google.api.http) = { + patch: "/v1/{taxonomy.name=projects/*/locations/*/taxonomies/*}" + body: "taxonomy" + }; + option (google.api.method_signature) = "taxonomy"; + } + + // Lists all taxonomies in a project in a particular location that the caller + // has permission to view. + rpc ListTaxonomies(ListTaxonomiesRequest) returns (ListTaxonomiesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/taxonomies" + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a taxonomy. + rpc GetTaxonomy(GetTaxonomyRequest) returns (Taxonomy) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/taxonomies/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Creates a policy tag in a taxonomy. + rpc CreatePolicyTag(CreatePolicyTagRequest) returns (PolicyTag) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*/taxonomies/*}/policyTags" + body: "policy_tag" + }; + option (google.api.method_signature) = "parent,policy_tag"; + } + + // Deletes a policy tag. This method also deletes + // - all of its descendant policy tags, if any, + // - the policies associated with the policy tag and its descendants, and + // - references from BigQuery table schema of the policy tag and its + // descendants. 
+ rpc DeletePolicyTag(DeletePolicyTagRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/taxonomies/*/policyTags/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Updates a policy tag. This method can update the policy tag's display + // name, description, and parent policy tag. + rpc UpdatePolicyTag(UpdatePolicyTagRequest) returns (PolicyTag) { + option (google.api.http) = { + patch: "/v1/{policy_tag.name=projects/*/locations/*/taxonomies/*/policyTags/*}" + body: "policy_tag" + }; + option (google.api.method_signature) = "policy_tag"; + } + + // Lists all policy tags in a taxonomy. + rpc ListPolicyTags(ListPolicyTagsRequest) returns (ListPolicyTagsResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*/taxonomies/*}/policyTags" + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a policy tag. + rpc GetPolicyTag(GetPolicyTagRequest) returns (PolicyTag) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/taxonomies/*/policyTags/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Gets the IAM policy for a policy tag or a taxonomy. + rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) returns (google.iam.v1.Policy) { + option (google.api.http) = { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*}:getIamPolicy" + body: "*" + additional_bindings { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*/policyTags/*}:getIamPolicy" + body: "*" + } + }; + } + + // Sets the IAM policy for a policy tag or a taxonomy. 
+ rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) returns (google.iam.v1.Policy) { + option (google.api.http) = { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*}:setIamPolicy" + body: "*" + additional_bindings { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*/policyTags/*}:setIamPolicy" + body: "*" + } + }; + } + + // Returns the permissions that a caller has on a specified policy tag or + // taxonomy. + rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) returns (google.iam.v1.TestIamPermissionsResponse) { + option (google.api.http) = { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*}:testIamPermissions" + body: "*" + additional_bindings { + post: "/v1/{resource=projects/*/locations/*/taxonomies/*/policyTags/*}:testIamPermissions" + body: "*" + } + }; + } +} + +// A taxonomy is a collection of hierarchical policy tags that classify data +// along a common axis. For instance a "data sensitivity" taxonomy could contain +// the following policy tags: +// + PII +// + Account number +// + Age +// + SSN +// + Zipcode +// + Financials +// + Revenue +// A "data origin" taxonomy could contain the following policy tags: +// + User data +// + Employee data +// + Partner data +// + Public data +message Taxonomy { + option (google.api.resource) = { + type: "datacatalog.googleapis.com/Taxonomy" + pattern: "projects/{project}/locations/{location}/taxonomies/{taxonomy}" + }; + + // Defines policy types where the policy tags can be used for. + enum PolicyType { + // Unspecified policy type. + POLICY_TYPE_UNSPECIFIED = 0; + + // Fine-grained access control policy, which enables access control on + // tagged sub-resources. + FINE_GRAINED_ACCESS_CONTROL = 1; + } + + // Output only. Resource name of this taxonomy in format: + // "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}". + // Note that taxonomy_id's are unique and generated by Policy Tag Manager. 
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Required. User-defined name of this taxonomy. It must: contain only unicode letters, + // numbers, underscores, dashes and spaces; not start or end with spaces; and + // be at most 200 bytes long when encoded in UTF-8. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Description of this taxonomy. It must: contain only unicode characters, + // tabs, newlines, carriage returns and page breaks; and be at most 2000 bytes + // long when encoded in UTF-8. If not set, defaults to an empty description. + string description = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. Number of policy tags contained in this taxonomy. + int32 policy_tag_count = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Timestamps about this taxonomy. Only create_time and update_time are used. + SystemTimestamps taxonomy_timestamps = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. A list of policy types that are activated for this taxonomy. If not set, + // defaults to an empty list. + repeated PolicyType activated_policy_types = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined +// in a hierarchy. For example, consider the following hierarchy: +// + Geolocation +// + LatLong +// + City +// + ZipCode +// Policy tag "Geolocation" contains 3 child policy tags: "LatLong", "City", and +// "ZipCode". +message PolicyTag { + option (google.api.resource) = { + type: "datacatalog.googleapis.com/PolicyTag" + pattern: "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}" + }; + + // Output only. Resource name of this policy tag in format: + // "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policy_tag_id}". + // Both taxonomy_ids and policy_tag_ids are unique and generated by Policy Tag + // Manager. 
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Required. User-defined name of this policy tag. It must: be unique within the parent + // taxonomy; contain only unicode letters, numbers, underscores, dashes and + // spaces; not start or end with spaces; and be at most 200 bytes long when + // encoded in UTF-8. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Description of this policy tag. It must: contain only unicode characters, + // tabs, newlines, carriage returns and page breaks; and be at most 2000 bytes + // long when encoded in UTF-8. If not set, defaults to an empty description. + // If not set, defaults to an empty description. + string description = 3; + + // Resource name of this policy tag's parent policy tag (e.g. for the + // "LatLong" policy tag in the example above, this field contains the + // resource name of the "Geolocation" policy tag). If empty, it means this + // policy tag is a top level policy tag (e.g. this field is empty for the + // "Geolocation" policy tag in the example above). If not set, defaults to an + // empty string. + string parent_policy_tag = 4; + + // Output only. Resource names of child policy tags of this policy tag. + repeated string child_policy_tags = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Request message for +// [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. +message CreateTaxonomyRequest { + // Required. Resource name of the project that the taxonomy will belong to. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Taxonomy" + } + ]; + + // The taxonomy to be created. + Taxonomy taxonomy = 2; +} + +// Request message for +// [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. +message DeleteTaxonomyRequest { + // Required. Resource name of the taxonomy to be deleted. 
All policy tags in + // this taxonomy will also be deleted. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Taxonomy" + } + ]; +} + +// Request message for +// [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. +message UpdateTaxonomyRequest { + // The taxonomy to update. Only description, display_name, and activated + // policy types can be updated. + Taxonomy taxonomy = 1; + + // The update mask applies to the resource. For the `FieldMask` definition, + // see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + // If not set, defaults to all of the fields that are allowed to update. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. +message ListTaxonomiesRequest { + // Required. Resource name of the project to list the taxonomies of. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Taxonomy" + } + ]; + + // The maximum number of items to return. Must be a value between 1 and 1000. + // If not set, defaults to 50. + int32 page_size = 2; + + // The next_page_token value returned from a previous list request, if any. If + // not set, defaults to an empty string. + string page_token = 3; +} + +// Response message for +// [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. +message ListTaxonomiesResponse { + // Taxonomies that the project contains. + repeated Taxonomy taxonomies = 1; + + // Token used to retrieve the next page of results, or empty if there are no + // more results in the list. + string next_page_token = 2; +} + +// Request message for +// [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. +message GetTaxonomyRequest { + // Required. 
Resource name of the requested taxonomy. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Taxonomy" + } + ]; +} + +// Request message for +// [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. +message CreatePolicyTagRequest { + // Required. Resource name of the taxonomy that the policy tag will belong to. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/PolicyTag" + } + ]; + + // The policy tag to be created. + PolicyTag policy_tag = 2; +} + +// Request message for +// [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. +message DeletePolicyTagRequest { + // Required. Resource name of the policy tag to be deleted. All of its descendant + // policy tags will also be deleted. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/PolicyTag" + } + ]; +} + +// Request message for +// [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. +message UpdatePolicyTagRequest { + // The policy tag to update. Only the description, display_name, and + // parent_policy_tag fields can be updated. + PolicyTag policy_tag = 1; + + // The update mask applies to the resource. Only display_name, description and + // parent_policy_tag can be updated and thus can be listed in the mask. If + // update_mask is not provided, all allowed fields (i.e. display_name, + // description and parent) will be updated. For more information including the + // `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + // If not set, defaults to all of the fields that are allowed to update. 
+ google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. +message ListPolicyTagsRequest { + // Required. Resource name of the taxonomy to list the policy tags of. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/PolicyTag" + } + ]; + + // The maximum number of items to return. Must be a value between 1 and 1000. + // If not set, defaults to 50. + int32 page_size = 2; + + // The next_page_token value returned from a previous List request, if any. If + // not set, defaults to an empty string. + string page_token = 3; +} + +// Response message for +// [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. +message ListPolicyTagsResponse { + // The policy tags that are in the requested taxonomy. + repeated PolicyTag policy_tags = 1; + + // Token used to retrieve the next page of results, or empty if there are no + // more results in the list. + string next_page_token = 2; +} + +// Request message for +// [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. +message GetPolicyTagRequest { + // Required. Resource name of the requested policy tag. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/PolicyTag" + } + ]; +} diff --git a/google/cloud/datacatalog_v1/proto/policytagmanagerserialization.proto b/google/cloud/datacatalog_v1/proto/policytagmanagerserialization.proto new file mode 100644 index 00000000..29b1d3df --- /dev/null +++ b/google/cloud/datacatalog_v1/proto/policytagmanagerserialization.proto @@ -0,0 +1,186 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.datacatalog.v1; + +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/datacatalog/v1/policytagmanager.proto"; +import "google/iam/v1/policy.proto"; +import "google/api/client.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.DataCatalog.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1;datacatalog"; +option java_multiple_files = true; +option java_outer_classname = "PolicyTagManagerSerializationProto"; +option java_package = "com.google.cloud.datacatalog.v1"; +option php_namespace = "Google\\Cloud\\DataCatalog\\V1"; +option ruby_package = "Google::Cloud::DataCatalog::V1"; + +// Policy Tag Manager serialization API service allows clients to manipulate +// their policy tags and taxonomies in serialized format, where taxonomy is a +// hierarchical group of policy tags. +service PolicyTagManagerSerialization { + option (google.api.default_host) = "datacatalog.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates new taxonomies (including their policy tags) by importing from + // inlined source or cross-regional source. New taxonomies will be created in + // a given parent project. + // + // If using the cross-regional source, a new taxonomy is created by copying + // from a source in another region. 
+ // + // If using the inlined source, this method provides a way to bulk create + // taxonomies and policy tags using nested proto structure. + rpc ImportTaxonomies(ImportTaxonomiesRequest) returns (ImportTaxonomiesResponse) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/taxonomies:import" + body: "*" + }; + } + + // Exports taxonomies as the requested type and returns the taxonomies + // including their policy tags. The requested taxonomies must belong to one + // project. + // + // SerializedTaxonomy protos with nested policy tags that are generated by + // this method can be used as input for future ImportTaxonomies calls. + rpc ExportTaxonomies(ExportTaxonomiesRequest) returns (ExportTaxonomiesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/taxonomies:export" + }; + } +} + +// Message representing a taxonomy, including its policy tags in hierarchy, as a +// nested proto. Used for taxonomy replacement, import, and export. +message SerializedTaxonomy { + // Required. Display name of the taxonomy. At most 200 bytes when encoded in UTF-8. + string display_name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Description of the serialized taxonomy. At most 2000 bytes when encoded in + // UTF-8. If not set, defaults to an empty description. + string description = 2; + + // Top level policy tags associated with the taxonomy, if any. + repeated SerializedPolicyTag policy_tags = 3; + + // A list of policy types that are activated per taxonomy. + repeated Taxonomy.PolicyType activated_policy_types = 4; +} + +// Message representing one policy tag, including all its descendant policy +// tags, as a nested proto. +message SerializedPolicyTag { + // Resource name of the policy tag. + // + // This field will be ignored when calling ImportTaxonomies. + string policy_tag = 1; + + // Required. Display name of the policy tag. At most 200 bytes when encoded in UTF-8. 
+ string display_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Description of the serialized policy tag. The length of the description is + // limited to 2000 bytes when encoded in UTF-8. If not set, defaults to an + // empty description. + string description = 3; + + // Children of the policy tag, if any. + repeated SerializedPolicyTag child_policy_tags = 4; +} + +// Request message for +// [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. +message ImportTaxonomiesRequest { + // Required. Resource name of project that the imported taxonomies will belong to. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Taxonomy" + } + ]; + + // Source taxonomies to be imported. + oneof source { + // Inline source used for taxonomies import. + InlineSource inline_source = 2; + + // Cross-regional source taxonomy to be imported. + CrossRegionalSource cross_regional_source = 3; + } +} + +// Inline source containing taxonomies to import. +message InlineSource { + // Required. Taxonomies to be imported. + repeated SerializedTaxonomy taxonomies = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Cross-regional source used to import an existing taxonomy into a different +// region. +message CrossRegionalSource { + // Required. The resource name of the source taxonomy to be imported. + string taxonomy = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Taxonomy" + } + ]; +} + +// Response message for +// [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. +message ImportTaxonomiesResponse { + // Taxonomies that were imported. + repeated Taxonomy taxonomies = 1; +} + +// Request message for +// [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. 
+message ExportTaxonomiesRequest { + // Required. Resource name of the project that the exported taxonomies belong to. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Taxonomy" + } + ]; + + // Required. Resource names of the taxonomies to be exported. + repeated string taxonomies = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Taxonomy" + } + ]; + + // Required. Taxonomies export destination. + oneof destination { + // Export taxonomies as serialized taxonomies, which contain all the policy + // tags as nested protos. + bool serialized_taxonomies = 3; + } +} + +// Response message for +// [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. +message ExportTaxonomiesResponse { + // List of taxonomies and policy tags as nested protos. + repeated SerializedTaxonomy taxonomies = 1; +} diff --git a/google/cloud/datacatalog_v1/proto/schema.proto b/google/cloud/datacatalog_v1/proto/schema.proto index c34d99e2..24f7fb88 100644 --- a/google/cloud/datacatalog_v1/proto/schema.proto +++ b/google/cloud/datacatalog_v1/proto/schema.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28,25 +28,37 @@ option ruby_package = "Google::Cloud::DataCatalog::V1"; // Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). message Schema { - // Required. Schema of columns. A maximum of 10,000 columns and sub-columns can be - // specified. - repeated ColumnSchema columns = 2 [(google.api.field_behavior) = REQUIRED]; + // The unified GoogleSQL-like schema of columns. + // + // The overall maximum number of columns and nested columns is 10,000. + // The maximum nested depth is 15 levels. 
+ repeated ColumnSchema columns = 2; } // Representation of a column within a schema. Columns could be nested inside // other columns. message ColumnSchema { // Required. Name of the column. + // + // Must be a UTF-8 string without dots (.). + // The maximum size is 64 bytes. string column = 6 [(google.api.field_behavior) = REQUIRED]; // Required. Type of the column. + // + // Must be a UTF-8 string with the maximum size of 128 bytes. string type = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Description of the column. Default value is an empty string. + // + // The description must be a UTF-8 string with the maximum size of 2000 + // bytes. string description = 2 [(google.api.field_behavior) = OPTIONAL]; - // Optional. A column's mode indicates whether the values in this column are required, - // nullable, etc. Only `NULLABLE`, `REQUIRED` and `REPEATED` are supported. + // Optional. A column's mode indicates if values in this column are required, + // nullable, or repeated. + // + // Only `NULLABLE`, `REQUIRED`, and `REPEATED` values are supported. // Default mode is `NULLABLE`. string mode = 3 [(google.api.field_behavior) = OPTIONAL]; diff --git a/google/cloud/datacatalog_v1/proto/search.proto b/google/cloud/datacatalog_v1/proto/search.proto index 37f6923b..09373923 100644 --- a/google/cloud/datacatalog_v1/proto/search.proto +++ b/google/cloud/datacatalog_v1/proto/search.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -55,6 +55,9 @@ message SearchCatalogResult { // * `//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId` string linked_resource = 4; + // Last-modified timestamp of the entry from the managing system. + google.protobuf.Timestamp modify_time = 7; + // The source system of the entry. Only applicable when `search_result_type` // is ENTRY. 
oneof system { @@ -66,6 +69,17 @@ message SearchCatalogResult { // integrate with. string user_specified_system = 9; } + + // Fully Qualified Name of the resource. + // There are two main forms of FQNs: + // {system}:{project}.{dot-separated path to resource} + // for non-regionalized resources + // {system}:{project}.{location id}.{dot-separated path to resource} + // for regionalized resources + // Examples: + // * dataproc_metastore:projectId.locationId.instanceId.databaseId.tableId + // * bigquery:table.project_id.dataset_id.table_id + string fully_qualified_name = 10; } // The different types of resources that can be returned in search. diff --git a/google/cloud/datacatalog_v1/proto/table_spec.proto b/google/cloud/datacatalog_v1/proto/table_spec.proto index c87afc54..43ce0df1 100644 --- a/google/cloud/datacatalog_v1/proto/table_spec.proto +++ b/google/cloud/datacatalog_v1/proto/table_spec.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -54,6 +54,9 @@ enum TableSourceType { // BigQuery native table. BIGQUERY_TABLE = 5; + + // BigQuery materialized view. + BIGQUERY_MATERIALIZED_VIEW = 7; } // Table view specification. diff --git a/google/cloud/datacatalog_v1/proto/tags.proto b/google/cloud/datacatalog_v1/proto/tags.proto index 4efefa52..9ef2e452 100644 --- a/google/cloud/datacatalog_v1/proto/tags.proto +++ b/google/cloud/datacatalog_v1/proto/tags.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -42,15 +42,17 @@ message Tag { // The resource name of the tag in URL format. 
Example: // - // * projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + // `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}` // // where `tag_id` is a system-generated identifier. - // Note that this Tag may not actually be stored in the location in this name. + // + // Note: The tag itself might not be stored in the location specified in its + // name. string name = 1; // Required. The resource name of the tag template that this tag uses. Example: // - // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // `projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}` // // This field cannot be modified after creation. string template = 2 [(google.api.field_behavior) = REQUIRED]; @@ -63,13 +65,11 @@ message Tag { // Deleting the scope from the parent resource will delete all tags attached // to that scope. These fields cannot be updated after creation. oneof scope { - // Resources like Entry can have schemas associated with them. This scope + // Resources like entry can have schemas associated with them. This scope // allows users to attach tags to an individual column based on that schema. // - // For attaching a tag to a nested column, use `.` to separate the column - // names. Example: - // - // * `outer_column.inner_column` + // To attach a tag to a nested column, separate column names with a dot + // (`.`). Example: `column.nested_column`. string column = 4; } @@ -97,6 +97,7 @@ message TagField { double double_value = 2; // Holds the value for a tag field with string type. + // The maximum length is 2000 UTF-8 characters. string string_value = 3; // Holds the value for a tag field with boolean type. @@ -134,13 +135,17 @@ message TagTemplate { // The resource name of the tag template in URL format. 
Example: // - // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // `projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}` // - // Note that this TagTemplate and its child resources may not actually be - // stored in the location in this name. + // Note: The tag template itself and its child resources might not be + // stored in the location specified in its name. string name = 1; - // The display name for this template. Defaults to an empty string. + // Display name for this template. Defaults to an empty string. + // + // The name must contain only Unicode letters, numbers (0-9), underscores (_), + // dashes (-), spaces ( ), and can't start or end with spaces. + // The maximum length is 200 characters. string display_name = 2; // Required. Map of tag template field IDs to the settings for the field. @@ -163,13 +168,21 @@ message TagTemplateField { // Output only. The resource name of the tag template field in URL format. Example: // - // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + // `projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}` + // + // Note: The `TagTemplateField` itself might not be stored in the location + // specified in its name. // - // Note that this TagTemplateField may not actually be stored in the location - // in this name. + // The name must contain only letters (a-z, A-Z), numbers (0-9), + // or underscores (_), and must start with a letter or underscore. + // The maximum length is 64 characters. string name = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // The display name for this field. Defaults to an empty string. + // + // The name must contain only Unicode letters, numbers (0-9), underscores (_), + // dashes (-), spaces ( ), and can't start or end with spaces. + // The maximum length is 200 characters. string display_name = 1; // Required. The type of value this tag field can contain. 
@@ -178,6 +191,9 @@ message TagTemplateField { // Whether this is a required field. Defaults to false. bool is_required = 3; + // The description for this field. Defaults to an empty string. + string description = 4; + // The order of this field with respect to other fields in this tag // template. For example, a higher value can indicate a more important field. // The value can be negative. Multiple fields can have the same order, and @@ -189,15 +205,22 @@ message FieldType { message EnumType { message EnumValue { // Required. The display name of the enum value. Must not be an empty string. + // + // The name must contain only Unicode letters, numbers (0-9), underscores + // (_), dashes (-), spaces ( ), and can't start or end with spaces. The + // maximum length is 200 characters. string display_name = 1 [(google.api.field_behavior) = REQUIRED]; } - // Required on create; optional on update. The set of allowed values for - // this enum. This set must not be empty, the display names of the values in - // this set must not be empty and the display names of the values must be - // case-insensitively unique within this set. Currently, enum values can - // only be added to the list of allowed values. Deletion and renaming of - // enum values are not supported. Can have up to 500 allowed values. + // The set of allowed values for this enum. + // + // This set must not be empty and can include up to 100 allowed values. + // The display names of the values in this set must not be empty and must + // be case-insensitively unique within this set. + // + // The order of items in this set is preserved. This field can be used to + // create, remove and reorder enum values. To rename enum values, use the + // `RenameTagTemplateFieldEnumValue` method. 
repeated EnumValue allowed_values = 1; } diff --git a/google/cloud/datacatalog_v1/proto/timestamps.proto b/google/cloud/datacatalog_v1/proto/timestamps.proto index a4372ae3..6566ce68 100644 --- a/google/cloud/datacatalog_v1/proto/timestamps.proto +++ b/google/cloud/datacatalog_v1/proto/timestamps.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 5964dba2..9eb89baf 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -30,6 +30,7 @@ from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -68,6 +69,12 @@ class DataCatalogAsyncClient: parse_tag_template_field_path = staticmethod( DataCatalogClient.parse_tag_template_field_path ) + tag_template_field_enum_value_path = staticmethod( + DataCatalogClient.tag_template_field_enum_value_path + ) + parse_tag_template_field_enum_value_path = staticmethod( + DataCatalogClient.parse_tag_template_field_enum_value_path + ) common_billing_account_path = staticmethod( DataCatalogClient.common_billing_account_path @@ -203,7 +210,7 @@ async def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. 
Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -230,8 +237,9 @@ async def search_catalog( on the ``request`` instance; if ``request`` is provided, this should not be set. query (:class:`str`): - Required. The query string in search query syntax. The - query must be non-empty. + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -349,24 +357,25 @@ async def create_entry_group( The request object. Request message for [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. parent (:class:`str`): - Required. The name of the project this entry group is - in. Example: + Required. The name of the project this entry group + belongs to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. + Note: The entry group itself and its child resources + might not be stored in the location specified in its + name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group_id (:class:`str`): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. 
This corresponds to the ``entry_group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -555,9 +564,13 @@ async def update_entry_group( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the entry - group. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -793,8 +806,8 @@ async def create_entry( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + r"""Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -809,20 +822,23 @@ async def create_entry( The request object. Request message for [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. parent (:class:`str`): - Required. The name of the entry group this entry is in. - Example: + Required. The name of the entry group this entry belongs + to. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not + be stored in the location specified in its name. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_id (:class:`str`): - Required. The id of the entry to - create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. This corresponds to the ``entry_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -925,8 +941,12 @@ async def update_entry( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the entry. If absent or empty, - all modifiable fields are updated. + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. The following fields are modifiable: @@ -934,7 +954,7 @@ async def update_entry( - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -942,15 +962,15 @@ async def update_entry( - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1374,8 +1394,12 @@ async def create_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. tag_template_id (:class:`str`): - Required. The id of the tag template - to create. 
+ Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. This corresponds to the ``tag_template_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1560,15 +1584,14 @@ async def update_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The field mask specifies the parts of the template to - overwrite. + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. - Allowed fields: - - - ``display_name`` - - If absent or empty, all of the allowed fields above will - be updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1754,7 +1777,11 @@ async def create_tag_template_field( should not be set. tag_template_field_id (:class:`str`): Required. The ID of the tag template field to create. - Field ids can contain letters (both uppercase and + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and lowercase), numbers (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. Field IDs must also be unique @@ -1861,21 +1888,24 @@ async def update_tag_template_field( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. The field mask specifies the parts of the - template to be updated. Allowed fields: + Optional. 
Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the - allowed fields above will be updated. + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be - deleted nor renamed. Updating a template field from - optional to required is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2023,6 +2053,91 @@ async def rename_tag_template_field( # Done; return the response. return response + async def rename_tag_template_field_enum_value( + self, + request: datacatalog.RenameTagTemplateFieldEnumValueRequest = None, + *, + name: str = None, + new_enum_value_display_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest`): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (:class:`str`): + Required. The name of the enum field value. 
Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (:class:`str`): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def delete_tag_template_field( self, request: datacatalog.DeleteTagTemplateFieldRequest = None, @@ -2131,12 +2246,13 @@ async def create_tag( [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. parent (:class:`str`): Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: + to. Tags can be attached to entries. An entry can have + up to 1000 attached tags. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not - actually be stored in the location in this name. + Note: The tag and its child resources might not be + stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2229,9 +2345,14 @@ async def update_tag( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. 
+ + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py index ea8551d4..138398ab 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -34,6 +34,7 @@ from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -253,6 +254,32 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tag_template_field_enum_value_path( + project: str, + location: str, + tag_template: str, + tag_template_field_id: str, + enum_value_display_name: str, + ) -> str: + """Return a fully-qualified tag_template_field_enum_value string.""" + return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format( + project=project, + location=location, + tag_template=tag_template, + tag_template_field_id=tag_template_field_id, + enum_value_display_name=enum_value_display_name, + ) + + @staticmethod + def parse_tag_template_field_enum_value_path(path: str) -> Dict[str, str]: + """Parse a tag_template_field_enum_value path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)/enumValues/(?P.+?)$", + path, + ) + return m.groupdict() 
if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" @@ -441,7 +468,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -468,8 +495,9 @@ def search_catalog( on the ``request`` instance; if ``request`` is provided, this should not be set. query (str): - Required. The query string in search query syntax. The - query must be non-empty. + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -581,24 +609,25 @@ def create_entry_group( The request object. Request message for [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. parent (str): - Required. The name of the project this entry group is - in. Example: + Required. The name of the project this entry group + belongs to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. + Note: The entry group itself and its child resources + might not be stored in the location specified in its + name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group_id (str): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. + Required. 
The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. This corresponds to the ``entry_group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -782,9 +811,13 @@ def update_entry_group( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry - group. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1016,8 +1049,8 @@ def create_entry( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + r"""Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -1032,20 +1065,23 @@ def create_entry( The request object. Request message for [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. parent (str): - Required. The name of the entry group this entry is in. - Example: + Required. The name of the entry group this entry belongs + to. 
Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not + be stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_id (str): - Required. The id of the entry to - create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. This corresponds to the ``entry_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1149,8 +1185,12 @@ def update_entry( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry. If absent or empty, - all modifiable fields are updated. + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. 
The following fields are modifiable: @@ -1158,7 +1198,7 @@ def update_entry( - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -1166,15 +1206,15 @@ def update_entry( - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1582,8 +1622,12 @@ def create_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. tag_template_id (str): - Required. The id of the tag template - to create. + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. This corresponds to the ``tag_template_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1770,15 +1814,14 @@ def update_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The field mask specifies the parts of the template to - overwrite. - - Allowed fields: + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. - - ``display_name`` - - If absent or empty, all of the allowed fields above will - be updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1966,7 +2009,11 @@ def create_tag_template_field( should not be set. tag_template_field_id (str): Required. The ID of the tag template field to create. - Field ids can contain letters (both uppercase and + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and lowercase), numbers (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. Field IDs must also be unique @@ -2076,21 +2123,24 @@ def update_tag_template_field( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The field mask specifies the parts of the - template to be updated. Allowed fields: + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the - allowed fields above will be updated. + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be - deleted nor renamed. Updating a template field from - optional to required is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2244,6 +2294,94 @@ def rename_tag_template_field( # Done; return the response. return response + def rename_tag_template_field_enum_value( + self, + request: datacatalog.RenameTagTemplateFieldEnumValueRequest = None, + *, + name: str = None, + new_enum_value_display_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Args: + request (google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (str): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.rename_tag_template_field_enum_value + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def delete_tag_template_field( self, request: datacatalog.DeleteTagTemplateFieldRequest = None, @@ -2355,12 +2493,13 @@ def create_tag( [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. parent (str): Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: + to. Tags can be attached to entries. An entry can have + up to 1000 attached tags. 
Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not - actually be stored in the location in this name. + Note: The tag and its child resources might not be + stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2454,9 +2593,14 @@ def update_tag( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 0b3a9142..0dab8ad3 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -227,6 +227,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( + self.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=client_info, + ), self.delete_tag_template_field: gapic_v1.method.wrap_method( self.delete_tag_template_field, default_timeout=None, @@ -455,6 +460,15 @@ def rename_tag_template_field( ]: raise NotImplementedError() + @property + def rename_tag_template_field_enum_value( + self, + ) -> typing.Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + ]: + raise NotImplementedError() + @property def delete_tag_template_field( self, diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index efca3a63..7206d1e1 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -240,7 +240,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. 
Note that Data Catalog search queries do not guarantee full @@ -437,8 +437,8 @@ def create_entry( ) -> Callable[[datacatalog.CreateEntryRequest], datacatalog.Entry]: r"""Return a callable for the create entry method over gRPC. - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -816,6 +816,38 @@ def rename_tag_template_field( ) return self._stubs["rename_tag_template_field"] + @property + def rename_tag_template_field_enum_value( + self, + ) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], tags.TagTemplateField + ]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rename_tag_template_field_enum_value" not in self._stubs: + self._stubs[ + "rename_tag_template_field_enum_value" + ] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue", + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs["rename_tag_template_field_enum_value"] + @property def delete_tag_template_field( self, diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 9e712dfe..4b61e241 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -244,7 +244,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -448,8 +448,8 @@ def create_entry( ) -> Callable[[datacatalog.CreateEntryRequest], Awaitable[datacatalog.Entry]]: r"""Return a callable for the create entry method over gRPC. - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. 
Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -839,6 +839,39 @@ def rename_tag_template_field( ) return self._stubs["rename_tag_template_field"] + @property + def rename_tag_template_field_enum_value( + self, + ) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Awaitable[tags.TagTemplateField], + ]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_tag_template_field_enum_value" not in self._stubs: + self._stubs[ + "rename_tag_template_field_enum_value" + ] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue", + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs["rename_tag_template_field_enum_value"] + @property def delete_tag_template_field( self, diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py new file mode 100644 index 00000000..8abc6009 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import PolicyTagManagerClient +from .async_client import PolicyTagManagerAsyncClient + +__all__ = ( + "PolicyTagManagerClient", + "PolicyTagManagerAsyncClient", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py new file mode 100644 index 00000000..0fb150d4 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -0,0 +1,1302 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore + +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport +from .client import PolicyTagManagerClient + + +class PolicyTagManagerAsyncClient: + """Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. 
+ """ + + _client: PolicyTagManagerClient + + DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT + + policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) + parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) + taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) + + common_billing_account_path = staticmethod( + PolicyTagManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + PolicyTagManagerClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + PolicyTagManagerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerClient.parse_common_organization_path + ) + + common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) + parse_common_project_path = staticmethod( + PolicyTagManagerClient.parse_common_project_path + ) + + common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) + parse_common_location_path = staticmethod( + PolicyTagManagerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. 
+ """ + return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. + """ + return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the policy tag manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = PolicyTagManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_taxonomy( + self, + request: policytagmanager.CreateTaxonomyRequest = None, + *, + parent: str = None, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.CreateTaxonomyRequest`): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (:class:`str`): + Required. Resource name of the + project that the taxonomy will belong + to. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.CreateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_taxonomy( + self, + request: policytagmanager.DeleteTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest`): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (:class:`str`): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.DeleteTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_taxonomy( + self, + request: policytagmanager.UpdateTaxonomyRequest = None, + *, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest`): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.UpdateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("taxonomy.name", request.taxonomy.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_taxonomies( + self, + request: policytagmanager.ListTaxonomiesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesAsyncPager: + r"""Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ListTaxonomiesRequest`): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (:class:`str`): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.ListTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTaxonomiesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_taxonomy( + self, + request: policytagmanager.GetTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.GetTaxonomyRequest`): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (:class:`str`): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.GetTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_policy_tag( + self, + request: policytagmanager.CreatePolicyTagRequest = None, + *, + parent: str = None, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.CreatePolicyTagRequest`): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (:class:`str`): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.CreatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_policy_tag( + self, + request: policytagmanager.DeletePolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. This method also deletes + - all of its descendant policy tags, if any, + - the policies associated with the policy tag and its + descendants, and - references from BigQuery table + schema of the policy tag and its descendants. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.DeletePolicyTagRequest`): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (:class:`str`): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.DeletePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_policy_tag( + self, + request: policytagmanager.UpdatePolicyTagRequest = None, + *, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. 
+ + Args: + request (:class:`google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest`): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.UpdatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy_tag.name", request.policy_tag.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_policy_tags( + self, + request: policytagmanager.ListPolicyTagsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsAsyncPager: + r"""Lists all policy tags in a taxonomy. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ListPolicyTagsRequest`): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (:class:`str`): + Required. Resource name of the + taxonomy to list the policy tags of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.ListPolicyTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_policy_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPolicyTagsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_policy_tag( + self, + request: policytagmanager.GetPolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.GetPolicyTagRequest`): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (:class:`str`): + Required. Resource name of the + requested policy tag. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.GetPolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
        return response

    async def test_iam_permissions(
        self,
        request: iam_policy.TestIamPermissionsRequest = None,
        *,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> iam_policy.TestIamPermissionsResponse:
        r"""Returns the permissions that a caller has on a
        specified policy tag or taxonomy.

        Args:
            request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`):
                The request object. Request message for
                ``TestIamPermissions`` method.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
                Response message for ``TestIamPermissions`` method.
        """
        # Create or coerce a protobuf request object.

        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = iam_policy.TestIamPermissionsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.test_iam_permissions,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response


# Attach the installed package version to outgoing requests (the
# x-goog-api-client header); fall back to an empty ClientInfo when the
# distribution metadata is unavailable, e.g. when running from a source
# checkout that was never pip-installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-datacatalog",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("PolicyTagManagerAsyncClient",)
diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py
new file mode 100644
index 00000000..7e4f27e5
--- /dev/null
+++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py
@@ -0,0 +1,1502 @@
# -*- coding: utf-8 -*-

# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore + +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +class PolicyTagManagerClientMeta(type): + """Metaclass for the PolicyTagManager client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PolicyTagManagerTransport]] + _transport_registry["grpc"] = PolicyTagManagerGrpcTransport + _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[PolicyTagManagerTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta): + """Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datacatalog.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. 
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            PolicyTagManagerClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            PolicyTagManagerClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    # Backward-compatible alias: older surfaces used *_json for the same call.
    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> PolicyTagManagerTransport:
        """Return the transport used by the client instance.

        Returns:
            PolicyTagManagerTransport: The transport used by the client instance.
+ """ + return self._transport + + @staticmethod + def policy_tag_path( + project: str, location: str, taxonomy: str, policy_tag: str, + ) -> str: + """Return a fully-qualified policy_tag string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( + project=project, + location=location, + taxonomy=taxonomy, + policy_tag=policy_tag, + ) + + @staticmethod + def parse_policy_tag_path(path: str) -> Dict[str, str]: + """Parse a policy_tag path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)/policyTags/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: + """Return a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str, str]: + """Parse a taxonomy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = 
re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the policy tag manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
        # GOOGLE_API_USE_CLIENT_CERTIFICATE gates client-certificate (mTLS)
        # usage; values other than "true"https://github.com/"false" make strtobool raise.
        use_client_cert = bool(
            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
        )

        client_cert_source_func = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # An explicit cert source from client_options wins.
                is_mtls = True
                client_cert_source_func = client_options.client_cert_source
            else:
                # Otherwise fall back to the machine's default client cert,
                # if one is available.
                is_mtls = mtls.has_default_client_cert_source()
                client_cert_source_func = (
                    mtls.default_client_cert_source() if is_mtls else None
                )

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            # An explicit api_endpoint always takes precedence.
            api_endpoint = client_options.api_endpoint
        else:
            # GOOGLE_API_USE_MTLS_ENDPOINT: "never" | "always" | "auto"
            # (default "auto": mTLS endpoint only when a client cert is set).
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, PolicyTagManagerTransport):
            # transport is a PolicyTagManagerTransport instance.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its scopes directly."
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_taxonomy( + self, + request: policytagmanager.CreateTaxonomyRequest = None, + *, + parent: str = None, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Args: + request (google.cloud.datacatalog_v1.types.CreateTaxonomyRequest): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (str): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. 
For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreateTaxonomyRequest): + request = policytagmanager.CreateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_taxonomy( + self, + request: policytagmanager.DeleteTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Args: + request (google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (str): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeleteTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): + request = policytagmanager.DeleteTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_taxonomy( + self, + request: policytagmanager.UpdateTaxonomyRequest = None, + *, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Args: + request (google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. 
For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): + request = policytagmanager.UpdateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("taxonomy.name", request.taxonomy.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_taxonomies( + self, + request: policytagmanager.ListTaxonomiesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesPager: + r"""Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Args: + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (str): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, policytagmanager.ListTaxonomiesRequest): + request = policytagmanager.ListTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTaxonomiesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_taxonomy( + self, + request: policytagmanager.GetTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.GetTaxonomyRequest): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (str): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of + hierarchical policy tags that classify + data along a common axis. For instance a + "data sensitivity" taxonomy could + contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain + the following policy tags: + User data + + Employee data + + Partner data + + Public data + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetTaxonomyRequest): + request = policytagmanager.GetTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def create_policy_tag( + self, + request: policytagmanager.CreatePolicyTagRequest = None, + *, + parent: str = None, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.CreatePolicyTagRequest): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (str): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreatePolicyTagRequest): + request = policytagmanager.CreatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_policy_tag( + self, + request: policytagmanager.DeletePolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. This method also deletes + - all of its descendant policy tags, if any, + - the policies associated with the policy tag and its + descendants, and - references from BigQuery table + schema of the policy tag and its descendants. + + Args: + request (google.cloud.datacatalog_v1.types.DeletePolicyTagRequest): + The request object. 
Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (str): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeletePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeletePolicyTagRequest): + request = policytagmanager.DeletePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_policy_tag( + self, + request: policytagmanager.UpdatePolicyTagRequest = None, + *, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Args: + request (google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): + request = policytagmanager.UpdatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy_tag.name", request.policy_tag.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_policy_tags( + self, + request: policytagmanager.ListPolicyTagsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsPager: + r"""Lists all policy tags in a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (str): + Required. Resource name of the + taxonomy to list the policy tags of. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListPolicyTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.ListPolicyTagsRequest): + request = policytagmanager.ListPolicyTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPolicyTagsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_policy_tag( + self, + request: policytagmanager.GetPolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + Args: + request (google.cloud.datacatalog_v1.types.GetPolicyTagRequest): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (str): + Required. Resource name of the + requested policy tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 + child policy tags: "LatLong", "City", + and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetPolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetPolicyTagRequest): + request = policytagmanager.GetPolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for `GetIamPolicy` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ request = iam_policy.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for `SetIamPolicy` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for + `TestIamPermissions` method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. 
class ListTaxonomiesPager:
    """Pager for ``list_taxonomies`` responses.

    Wraps an initial
    :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` and
    provides an ``__iter__`` method that walks the ``taxonomies`` field,
    issuing further ``ListTaxonomies`` requests as each page is
    exhausted.

    Unknown attribute access is forwarded to the most recent response,
    so all the usual response fields remain available on the pager.
    """

    def __init__(
        self,
        method: Callable[..., policytagmanager.ListTaxonomiesResponse],
        request: policytagmanager.ListTaxonomiesRequest,
        response: policytagmanager.ListTaxonomiesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest):
                The initial request object.
            response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each page request as metadata.
        """
        self._method = method
        # Re-wrap the request so mutating ``page_token`` while paging
        # never touches the caller's object.
        self._request = policytagmanager.ListTaxonomiesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[policytagmanager.ListTaxonomiesResponse]:
        """Lazily yield each page of results, fetching on demand."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[policytagmanager.Taxonomy]:
        for page in self.pages:
            for taxonomy in page.taxonomies:
                yield taxonomy

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListTaxonomiesAsyncPager:
    """Async pager for ``list_taxonomies`` responses.

    Wraps an initial
    :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` and
    provides an ``__aiter__`` method that walks the ``taxonomies`` field,
    awaiting further ``ListTaxonomies`` requests as each page is
    exhausted.

    Unknown attribute access is forwarded to the most recent response,
    so all the usual response fields remain available on the pager.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]],
        request: policytagmanager.ListTaxonomiesRequest,
        response: policytagmanager.ListTaxonomiesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest):
                The initial request object.
            response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each page request as metadata.
        """
        self._method = method
        # Re-wrap the request so mutating ``page_token`` while paging
        # never touches the caller's object.
        self._request = policytagmanager.ListTaxonomiesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[policytagmanager.ListTaxonomiesResponse]:
        """Lazily yield each page of results, awaiting fetches on demand."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterable[policytagmanager.Taxonomy]:
        async def async_generator():
            async for page in self.pages:
                for taxonomy in page.taxonomies:
                    yield taxonomy

        return async_generator()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+ """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[policytagmanager.Taxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPolicyTagsPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., policytagmanager.ListPolicyTagsResponse], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[policytagmanager.PolicyTag]: + for page in self.pages: + yield from page.policy_tags + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPolicyTagsAsyncPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[policytagmanager.PolicyTag]: + async def async_generator(): + async for page in self.pages: + for response in page.policy_tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py new file mode 100644 index 00000000..95f18c5c --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the 
# Registry mapping transport names to their implementing classes; the
# client factory uses it to resolve a ``transport="..."`` argument.
# OrderedDict preserves the preference order (sync gRPC first).
_transport_registry = OrderedDict(
    [
        ("grpc", PolicyTagManagerGrpcTransport),
        ("grpc_asyncio", PolicyTagManagerGrpcAsyncIOTransport),
    ]
)  # type: Dict[str, Type[PolicyTagManagerTransport]]

__all__ = (
    "PolicyTagManagerTransport",
    "PolicyTagManagerGrpcTransport",
    "PolicyTagManagerGrpcAsyncIOTransport",
)
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class PolicyTagManagerTransport(abc.ABC): + """Abstract transport class for PolicyTagManager.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=self._scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_taxonomy: gapic_v1.method.wrap_method( + self.create_taxonomy, default_timeout=None, client_info=client_info, + ), + self.delete_taxonomy: gapic_v1.method.wrap_method( + self.delete_taxonomy, default_timeout=None, client_info=client_info, + ), + self.update_taxonomy: gapic_v1.method.wrap_method( + self.update_taxonomy, default_timeout=None, client_info=client_info, + ), + self.list_taxonomies: gapic_v1.method.wrap_method( + self.list_taxonomies, default_timeout=None, client_info=client_info, + ), + self.get_taxonomy: gapic_v1.method.wrap_method( + self.get_taxonomy, default_timeout=None, client_info=client_info, + ), + self.create_policy_tag: gapic_v1.method.wrap_method( + self.create_policy_tag, default_timeout=None, client_info=client_info, + ), + self.delete_policy_tag: gapic_v1.method.wrap_method( + self.delete_policy_tag, default_timeout=None, client_info=client_info, + ), + self.update_policy_tag: gapic_v1.method.wrap_method( + self.update_policy_tag, default_timeout=None, client_info=client_info, + ), + self.list_policy_tags: gapic_v1.method.wrap_method( + self.list_policy_tags, default_timeout=None, client_info=client_info, + ), + self.get_policy_tag: gapic_v1.method.wrap_method( + self.get_policy_tag, default_timeout=None, client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, default_timeout=None, client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, default_timeout=None, client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + @property + def create_taxonomy( + self, + ) -> typing.Callable[ + [policytagmanager.CreateTaxonomyRequest], + typing.Union[ + policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] + ], + ]: + raise NotImplementedError() + + @property + def 
delete_taxonomy( + self, + ) -> typing.Callable[ + [policytagmanager.DeleteTaxonomyRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def update_taxonomy( + self, + ) -> typing.Callable[ + [policytagmanager.UpdateTaxonomyRequest], + typing.Union[ + policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] + ], + ]: + raise NotImplementedError() + + @property + def list_taxonomies( + self, + ) -> typing.Callable[ + [policytagmanager.ListTaxonomiesRequest], + typing.Union[ + policytagmanager.ListTaxonomiesResponse, + typing.Awaitable[policytagmanager.ListTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_taxonomy( + self, + ) -> typing.Callable[ + [policytagmanager.GetTaxonomyRequest], + typing.Union[ + policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] + ], + ]: + raise NotImplementedError() + + @property + def create_policy_tag( + self, + ) -> typing.Callable[ + [policytagmanager.CreatePolicyTagRequest], + typing.Union[ + policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] + ], + ]: + raise NotImplementedError() + + @property + def delete_policy_tag( + self, + ) -> typing.Callable[ + [policytagmanager.DeletePolicyTagRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def update_policy_tag( + self, + ) -> typing.Callable[ + [policytagmanager.UpdatePolicyTagRequest], + typing.Union[ + policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] + ], + ]: + raise NotImplementedError() + + @property + def list_policy_tags( + self, + ) -> typing.Callable[ + [policytagmanager.ListPolicyTagsRequest], + typing.Union[ + policytagmanager.ListPolicyTagsResponse, + typing.Awaitable[policytagmanager.ListPolicyTagsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_policy_tag( + self, + ) -> typing.Callable[ + 
[policytagmanager.GetPolicyTagRequest], + typing.Union[ + policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] + ], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PolicyTagManagerTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py new file mode 100644 index 00000000..a20ebcc6 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -0,0 +1,597 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): + """gRPC backend transport for PolicyTagManager. + + Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_taxonomy( + self, + ) -> Callable[[policytagmanager.CreateTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_taxonomy" not in self._stubs: + self._stubs["create_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy", + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["create_taxonomy"] + + @property + def delete_taxonomy( + self, + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], empty.Empty]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_taxonomy" not in self._stubs: + self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy", + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_taxonomy"] + + @property + def update_taxonomy( + self, + ) -> Callable[[policytagmanager.UpdateTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_taxonomy" not in self._stubs: + self._stubs["update_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy", + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["update_taxonomy"] + + @property + def list_taxonomies( + self, + ) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + policytagmanager.ListTaxonomiesResponse, + ]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + ~.ListTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_taxonomies" not in self._stubs: + self._stubs["list_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies", + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs["list_taxonomies"] + + @property + def get_taxonomy( + self, + ) -> Callable[[policytagmanager.GetTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_taxonomy" not in self._stubs: + self._stubs["get_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy", + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["get_taxonomy"] + + @property + def create_policy_tag( + self, + ) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], policytagmanager.PolicyTag + ]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_policy_tag" not in self._stubs: + self._stubs["create_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag", + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["create_policy_tag"] + + @property + def delete_policy_tag( + self, + ) -> Callable[[policytagmanager.DeletePolicyTagRequest], empty.Empty]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. This method also deletes + - all of its descendant policy tags, if any, + - the policies associated with the policy tag and its + descendants, and - references from BigQuery table + schema of the policy tag and its descendants. + + Returns: + Callable[[~.DeletePolicyTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_policy_tag" not in self._stubs: + self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag", + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_policy_tag"] + + @property + def update_policy_tag( + self, + ) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], policytagmanager.PolicyTag + ]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_policy_tag" not in self._stubs: + self._stubs["update_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag", + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["update_policy_tag"] + + @property + def list_policy_tags( + self, + ) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + policytagmanager.ListPolicyTagsResponse, + ]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. + + Returns: + Callable[[~.ListPolicyTagsRequest], + ~.ListPolicyTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_policy_tags" not in self._stubs: + self._stubs["list_policy_tags"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags", + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs["list_policy_tags"] + + @property + def get_policy_tag( + self, + ) -> Callable[[policytagmanager.GetPolicyTagRequest], policytagmanager.PolicyTag]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_policy_tag" not in self._stubs: + self._stubs["get_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag", + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["get_policy_tag"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PolicyTagManagerGrpcTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py new file mode 100644 index 00000000..13f68af2 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -0,0 +1,610 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerGrpcTransport + + +class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): + """gRPC AsyncIO backend transport for PolicyTagManager. + + Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_taxonomy( + self, + ) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_taxonomy" not in self._stubs: + self._stubs["create_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy", + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["create_taxonomy"] + + @property + def delete_taxonomy( + self, + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_taxonomy" not in self._stubs: + self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy", + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_taxonomy"] + + @property + def update_taxonomy( + self, + ) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_taxonomy" not in self._stubs: + self._stubs["update_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy", + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["update_taxonomy"] + + @property + def list_taxonomies( + self, + ) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Awaitable[policytagmanager.ListTaxonomiesResponse], + ]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. 
+ + Returns: + Callable[[~.ListTaxonomiesRequest], + Awaitable[~.ListTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_taxonomies" not in self._stubs: + self._stubs["list_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies", + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs["list_taxonomies"] + + @property + def get_taxonomy( + self, + ) -> Callable[ + [policytagmanager.GetTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_taxonomy" not in self._stubs: + self._stubs["get_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy", + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["get_taxonomy"] + + @property + def create_policy_tag( + self, + ) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. 
+ + Returns: + Callable[[~.CreatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_policy_tag" not in self._stubs: + self._stubs["create_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag", + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["create_policy_tag"] + + @property + def delete_policy_tag( + self, + ) -> Callable[[policytagmanager.DeletePolicyTagRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. This method also deletes + - all of its descendant policy tags, if any, + - the policies associated with the policy tag and its + descendants, and - references from BigQuery table + schema of the policy tag and its descendants. + + Returns: + Callable[[~.DeletePolicyTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_policy_tag" not in self._stubs: + self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag", + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_policy_tag"] + + @property + def update_policy_tag( + self, + ) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_policy_tag" not in self._stubs: + self._stubs["update_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag", + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["update_policy_tag"] + + @property + def list_policy_tags( + self, + ) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Awaitable[policytagmanager.ListPolicyTagsResponse], + ]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. + + Returns: + Callable[[~.ListPolicyTagsRequest], + Awaitable[~.ListPolicyTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_policy_tags" not in self._stubs: + self._stubs["list_policy_tags"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags", + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs["list_policy_tags"] + + @property + def get_policy_tag( + self, + ) -> Callable[ + [policytagmanager.GetPolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_policy_tag" not in self._stubs: + self._stubs["get_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag", + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["get_policy_tag"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PolicyTagManagerGrpcAsyncIOTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py new file mode 100644 index 00000000..16fecda2 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .client import PolicyTagManagerSerializationClient +from .async_client import PolicyTagManagerSerializationAsyncClient + +__all__ = ( + "PolicyTagManagerSerializationClient", + "PolicyTagManagerSerializationAsyncClient", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py new file mode 100644 index 00000000..cb558d59 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport +from .client import PolicyTagManagerSerializationClient + + +class PolicyTagManagerSerializationAsyncClient: + """Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. 
+ """ + + _client: PolicyTagManagerSerializationClient + + DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT + + taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) + parse_taxonomy_path = staticmethod( + PolicyTagManagerSerializationClient.parse_taxonomy_path + ) + + common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_organization_path + ) + + common_project_path = staticmethod( + PolicyTagManagerSerializationClient.common_project_path + ) + parse_common_project_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_project_path + ) + + common_location_path = staticmethod( + PolicyTagManagerSerializationClient.common_location_path + ) + parse_common_location_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. 
+ """ + return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. + """ + return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PolicyTagManagerSerializationClient).get_transport_class, + type(PolicyTagManagerSerializationClient), + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the policy tag manager serialization client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = PolicyTagManagerSerializationClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def import_taxonomies( + self, + request: policytagmanagerserialization.ImportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. 
+ + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest`): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def export_taxonomies( + self, + request: policytagmanagerserialization.ExportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. 
The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest`): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerSerializationAsyncClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py new file mode 100644 index 00000000..d0e54656 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -0,0 +1,490 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerSerializationGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +class PolicyTagManagerSerializationClientMeta(type): + """Metaclass for the PolicyTagManagerSerialization client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] + _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = PolicyTagManagerSerializationGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[PolicyTagManagerSerializationTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PolicyTagManagerSerializationClient(
+    metaclass=PolicyTagManagerSerializationClientMeta
+):
+    """Policy Tag Manager serialization API service allows clients
+    to manipulate their policy tags and taxonomies in serialized
+    format, where taxonomy is a hierarchical group of policy tags.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+ + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def taxonomy_path(project: str, location: str, taxonomy: str,) -> str:
+        """Return a fully-qualified taxonomy string."""
+        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(
+            project=project, location=location, taxonomy=taxonomy,
+        )
+
+    @staticmethod
+    def parse_taxonomy_path(path: str) -> Dict[str, str]:
+        """Parse a taxonomy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return 
"projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, PolicyTagManagerSerializationTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the policy tag manager serialization client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, PolicyTagManagerSerializationTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerSerializationTransport): + # transport is a PolicyTagManagerSerializationTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def import_taxonomies( + self, + request: policytagmanagerserialization.ImportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. 
New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Args: + request (google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ImportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, policytagmanagerserialization.ImportTaxonomiesRequest + ): + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def export_taxonomies( + self, + request: policytagmanagerserialization.ExportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Args: + request (google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ExportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, policytagmanagerserialization.ExportTaxonomiesRequest + ): + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerSerializationClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py new file mode 100644 index 00000000..1e108bd2 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerSerializationTransport +from .grpc import PolicyTagManagerSerializationGrpcTransport +from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] +_transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport +_transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport + +__all__ = ( + "PolicyTagManagerSerializationTransport", + "PolicyTagManagerSerializationGrpcTransport", + "PolicyTagManagerSerializationGrpcAsyncIOTransport", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py new file mode 100644 index 00000000..2e30ca79 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class PolicyTagManagerSerializationTransport(abc.ABC): + """Abstract transport class for PolicyTagManagerSerialization.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=self._scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.import_taxonomies: gapic_v1.method.wrap_method( + self.import_taxonomies, default_timeout=None, client_info=client_info, + ), + self.export_taxonomies: gapic_v1.method.wrap_method( + self.export_taxonomies, default_timeout=None, client_info=client_info, + ), + } + + @property + def import_taxonomies( + self, + ) -> typing.Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + typing.Union[ + policytagmanagerserialization.ImportTaxonomiesResponse, + typing.Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def export_taxonomies( + self, + ) -> typing.Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + typing.Union[ + policytagmanagerserialization.ExportTaxonomiesResponse, + typing.Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PolicyTagManagerSerializationTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py new file mode 100644 index 00000000..9c99a411 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerSerializationGrpcTransport( + PolicyTagManagerSerializationTransport +): + """gRPC backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "datacatalog.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """ + return self._grpc_channel + + @property + def import_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + policytagmanagerserialization.ImportTaxonomiesResponse, + ]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + ~.ImportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_taxonomies" not in self._stubs: + self._stubs["import_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies", + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs["import_taxonomies"] + + @property + def export_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + policytagmanagerserialization.ExportTaxonomiesResponse, + ]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. 
+ + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + ~.ExportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_taxonomies" not in self._stubs: + self._stubs["export_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies", + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs["export_taxonomies"] + + +__all__ = ("PolicyTagManagerSerializationGrpcTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py new file mode 100644 index 00000000..2cea6f2f --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerSerializationGrpcTransport + + +class PolicyTagManagerSerializationGrpcAsyncIOTransport( + PolicyTagManagerSerializationTransport +): + """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "datacatalog.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def import_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], + ]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + Awaitable[~.ImportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_taxonomies" not in self._stubs: + self._stubs["import_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies", + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs["import_taxonomies"] + + @property + def export_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], + ]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + Awaitable[~.ExportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_taxonomies" not in self._stubs: + self._stubs["export_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies", + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs["export_taxonomies"] + + +__all__ = ("PolicyTagManagerSerializationGrpcAsyncIOTransport",) diff --git a/google/cloud/datacatalog_v1/types/__init__.py b/google/cloud/datacatalog_v1/types/__init__.py index fc60cdfe..d110888f 100644 --- a/google/cloud/datacatalog_v1/types/__init__.py +++ b/google/cloud/datacatalog_v1/types/__init__.py @@ -15,12 +15,14 @@ # limitations under the License. # +from .data_source import DataSource from .datacatalog import ( CreateEntryGroupRequest, CreateEntryRequest, CreateTagRequest, CreateTagTemplateFieldRequest, CreateTagTemplateRequest, + DatabaseTableSpec, DeleteEntryGroupRequest, DeleteEntryRequest, DeleteTagRequest, @@ -38,6 +40,7 @@ ListTagsRequest, ListTagsResponse, LookupEntryRequest, + RenameTagTemplateFieldEnumValueRequest, RenameTagTemplateFieldRequest, SearchCatalogRequest, SearchCatalogResponse, @@ -52,6 +55,32 @@ GcsFilesetSpec, GcsFileSpec, ) +from .policytagmanager import ( + CreatePolicyTagRequest, + CreateTaxonomyRequest, + DeletePolicyTagRequest, + DeleteTaxonomyRequest, + GetPolicyTagRequest, + GetTaxonomyRequest, + ListPolicyTagsRequest, + ListPolicyTagsResponse, + ListTaxonomiesRequest, + ListTaxonomiesResponse, + PolicyTag, + Taxonomy, + UpdatePolicyTagRequest, + UpdateTaxonomyRequest, +) +from .policytagmanagerserialization import ( + CrossRegionalSource, + ExportTaxonomiesRequest, + ExportTaxonomiesResponse, + ImportTaxonomiesRequest, + ImportTaxonomiesResponse, + InlineSource, + SerializedPolicyTag, + SerializedTaxonomy, +) from .schema import ( ColumnSchema, Schema, @@ -78,11 +107,13 @@ __all__ = ( "IntegratedSystem", + 
"DataSource", "CreateEntryGroupRequest", "CreateEntryRequest", "CreateTagRequest", "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", + "DatabaseTableSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeleteTagRequest", @@ -100,6 +131,7 @@ "ListTagsRequest", "ListTagsResponse", "LookupEntryRequest", + "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "SearchCatalogRequest", "SearchCatalogResponse", @@ -111,6 +143,28 @@ "EntryType", "GcsFilesetSpec", "GcsFileSpec", + "CreatePolicyTagRequest", + "CreateTaxonomyRequest", + "DeletePolicyTagRequest", + "DeleteTaxonomyRequest", + "GetPolicyTagRequest", + "GetTaxonomyRequest", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", + "PolicyTag", + "Taxonomy", + "UpdatePolicyTagRequest", + "UpdateTaxonomyRequest", + "CrossRegionalSource", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", + "ImportTaxonomiesRequest", + "ImportTaxonomiesResponse", + "InlineSource", + "SerializedPolicyTag", + "SerializedTaxonomy", "ColumnSchema", "Schema", "SearchCatalogResult", diff --git a/google/cloud/datacatalog_v1/types/common.py b/google/cloud/datacatalog_v1/types/common.py index feace354..51dd41c0 100644 --- a/google/cloud/datacatalog_v1/types/common.py +++ b/google/cloud/datacatalog_v1/types/common.py @@ -30,6 +30,7 @@ class IntegratedSystem(proto.Enum): INTEGRATED_SYSTEM_UNSPECIFIED = 0 BIGQUERY = 1 CLOUD_PUBSUB = 2 + DATAPROC_METASTORE = 3 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/data_source.py b/google/cloud/datacatalog_v1/types/data_source.py new file mode 100644 index 00000000..e70f40ee --- /dev/null +++ b/google/cloud/datacatalog_v1/types/data_source.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", manifest={"DataSource",}, +) + + +class DataSource(proto.Message): + r"""Describes the physical location of an entry. + + Attributes: + service (google.cloud.datacatalog_v1.types.DataSource.Service): + Service in which the data is physically + stored. + resource (str): + Full name of the resource as defined by the service, e.g. + //bigquery.googleapis.com/projects/{project_id}/locations/{location}/datasets/{dataset_id}/tables/{table_id} + """ + + class Service(proto.Enum): + r"""Service name where the data is stored.""" + SERVICE_UNSPECIFIED = 0 + CLOUD_STORAGE = 1 + BIGQUERY = 2 + + service = proto.Field(proto.ENUM, number=1, enum=Service,) + + resource = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/datacatalog.py b/google/cloud/datacatalog_v1/types/datacatalog.py index e90fb67a..88dc77f6 100644 --- a/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/google/cloud/datacatalog_v1/types/datacatalog.py @@ -19,6 +19,7 @@ from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source as gcd_data_source from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema as gcd_schema from google.cloud.datacatalog_v1.types import search @@ -46,6 +47,7 @@ "GetEntryRequest", "LookupEntryRequest", "Entry", + "DatabaseTableSpec", "EntryGroup", 
"CreateTagTemplateRequest", "GetTagTemplateRequest", @@ -57,6 +59,7 @@ "CreateTagTemplateFieldRequest", "UpdateTagTemplateFieldRequest", "RenameTagTemplateFieldRequest", + "RenameTagTemplateFieldEnumValueRequest", "DeleteTagTemplateFieldRequest", "ListTagsRequest", "ListTagsResponse", @@ -76,6 +79,8 @@ class EntryType(proto.Enum): MODEL = 5 DATA_STREAM = 3 FILESET = 4 + DATABASE = 7 + SERVICE = 14 class SearchCatalogRequest(proto.Message): @@ -89,8 +94,9 @@ class SearchCatalogRequest(proto.Message): false ``include_gcp_public_datasets`` is considered invalid. Data Catalog will return an error in such a case. query (str): - Required. The query string in search query syntax. The query - must be non-empty. + Optional. The query string in search query syntax. An empty + query string will result in all data assets (in the + specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -150,41 +156,15 @@ class Scope(proto.Message): Optional. The list of locations to search within. 1. If empty, search will be performed in all locations; - 2. If any of the locations are NOT in the valid locations - list, error will be returned; + 2. If any of the locations are NOT `supported + regions `__, + error will be returned; 3. Otherwise, search only the given locations for matching results. Typical usage is to leave this field empty. When a location is unreachable as returned in the ``SearchCatalogResponse.unreachable`` field, users can repeat the search request with this parameter set to get additional information on the error. 
- - Valid locations: - - - asia-east1 - - asia-east2 - - asia-northeast1 - - asia-northeast2 - - asia-northeast3 - - asia-south1 - - asia-southeast1 - - australia-southeast1 - - eu - - europe-north1 - - europe-west1 - - europe-west2 - - europe-west3 - - europe-west4 - - europe-west6 - - global - - northamerica-northeast1 - - southamerica-east1 - - us - - us-central1 - - us-east1 - - us-east4 - - us-west1 - - us-west2 """ include_org_ids = proto.RepeatedField(proto.STRING, number=2) @@ -221,7 +201,7 @@ class SearchCatalogResponse(proto.Message): from those locations. Users can get additional information on the error by repeating the search request with a more restrictive parameter -- setting the value for - ``SearchDataCatalogRequest.scope.include_locations``. + ``SearchDataCatalogRequest.scope.restricted_locations``. """ @property @@ -243,19 +223,19 @@ class CreateEntryGroupRequest(proto.Message): Attributes: parent (str): - Required. The name of the project this entry group is in. - Example: + Required. The name of the project this entry group belongs + to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. + Note: The entry group itself and its child resources might + not be stored in the location specified in its name. entry_group_id (str): - Required. The id of the entry group to - create. The id must begin with a letter or - underscore, contain only English letters, - numbers and underscores, and be at most 64 - characters. + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and must start with a letter or underscore. + The maximum size is 64 bytes when encoded in UTF-8. entry_group (google.cloud.datacatalog_v1.types.EntryGroup): The entry group to create. Defaults to an empty entry group. 
@@ -277,9 +257,12 @@ class UpdateEntryGroupRequest(proto.Message): Required. The updated entry group. "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry group. If - absent or empty, all modifiable fields are - updated. + Names of fields whose values to overwrite on + an entry group. + If this parameter is absent or empty, all + modifiable fields are overwritten. If such + fields are non-required and omitted in the + request body, their values are emptied. """ entry_group = proto.Field(proto.MESSAGE, number=1, message="EntryGroup",) @@ -377,15 +360,19 @@ class CreateEntryRequest(proto.Message): Attributes: parent (str): - Required. The name of the entry group this entry is in. + Required. The name of the entry group this entry belongs to. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not be + stored in the location specified in its name. entry_id (str): - Required. The id of the entry to create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + and underscores (_). The maximum size is 64 bytes when + encoded in UTF-8. entry (google.cloud.datacatalog_v1.types.Entry): Required. The entry to create. """ @@ -406,8 +393,11 @@ class UpdateEntryRequest(proto.Message): Required. The updated entry. The "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable fields + are overwritten. 
If such fields are non-required and omitted + in the request body, their values are emptied. The following fields are modifiable: @@ -415,7 +405,7 @@ class UpdateEntryRequest(proto.Message): - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -423,15 +413,15 @@ class UpdateEntryRequest(proto.Message): - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` """ entry = proto.Field(proto.MESSAGE, number=1, message="Entry",) @@ -493,15 +483,33 @@ class LookupEntryRequest(proto.Message): - ``bigquery.dataset.project_id.dataset_id`` - ``datacatalog.entry.project_id.location_id.entry_group_id.entry_id`` - ``*_id``\ s shoud satisfy the standard SQL rules for + ``*_id``\ s should satisfy the standard SQL rules for identifiers. https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. + fully_qualified_name (str): + Fully qualified name (FQN) of the resource. + + FQNs take two forms: + + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:project_id.location_id.instance_id.database_id.table_id`` """ linked_resource = proto.Field(proto.STRING, number=1, oneof="target_name") sql_resource = proto.Field(proto.STRING, number=3, oneof="target_name") + fully_qualified_name = proto.Field(proto.STRING, number=5, oneof="target_name") + class Entry(proto.Message): r"""Entry Metadata. 
A Data Catalog Entry resource represents another @@ -516,13 +524,13 @@ class Entry(proto.Message): Attributes: name (str): - The Data Catalog resource name of the entry in URL format. + Output only. The resource name of an entry in URL format. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not be + stored in the location specified in its name. linked_resource (str): The resource this metadata entry refers to. @@ -532,11 +540,38 @@ class Entry(proto.Message): For example, the ``linked_resource`` for a table resource from BigQuery is: - - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + ``//bigquery.googleapis.com/projects/{projectId}/datasets/{datasetId}/tables/{tableId}`` + + Output only when entry is one of the types in the + ``EntryType`` enum. + + For entries with a ``user_specified_type``, this field is + optional and defaults to an empty string. + + The resource string must contain only letters (a-z, A-Z), + numbers (0-9), underscores (_), periods (.), colons (:), + slashes (/), dashes (-), and hashes (#). The maximum size is + 200 bytes when encoded in UTF-8. + fully_qualified_name (str): + Fully qualified name (FQN) of the resource. Set + automatically for entries representing resources from synced + systems. Settable only during creation and read-only + afterwards. Can be used for search and lookup of the + entries. + + FQNs take two forms: + + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - Output only when Entry is of type in the EntryType enum. For - entries with user_specified_type, this field is optional and - defaults to an empty string. 
+ - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:project_id.location_id.instance_id.database_id.table_id`` type_ (google.cloud.datacatalog_v1.types.EntryType): The type of the entry. Only used for Entries with types in the @@ -577,16 +612,26 @@ class Entry(proto.Message): Specification for a group of BigQuery tables with name pattern ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. + database_table_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec): + Specification that applies to a table resource. Only valid + for entries of ``TABLE`` type. display_name (str): - Display information such as title and - description. A short name to identify the entry, - for example, "Analytics Data - Jan 2011". - Default value is an empty string. + Display name of an entry. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum size is 200 bytes when encoded + in UTF-8. Default value is an empty string. description (str): - Entry description, which can consist of - several sentences or paragraphs that describe - entry contents. Default value is an empty - string. + Entry description that can consist of several + sentences or paragraphs that describe entry + contents. + The description must not contain Unicode non- + characters as well as C0 and C1 control codes + except tabs (HT), new lines (LF), carriage + returns (CR), and page breaks (FF). + The maximum size is 2000 bytes when encoded in + UTF-8. Default value is an empty string. schema (google.cloud.datacatalog_v1.types.Schema): Schema of the entry. An entry might not have any schema attached to it. @@ -595,12 +640,16 @@ class Entry(proto.Message): Data Catalog entry. Output only when Entry is of type in the EntryType enum. 
For entries with user_specified_type, this field is optional and defaults to an empty timestamp. + data_source (google.cloud.datacatalog_v1.types.DataSource): + Output only. Physical location of the entry. """ name = proto.Field(proto.STRING, number=1) linked_resource = proto.Field(proto.STRING, number=9) + fully_qualified_name = proto.Field(proto.STRING, number=29) + type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type") @@ -632,6 +681,10 @@ class Entry(proto.Message): message=table_spec.BigQueryDateShardedSpec, ) + database_table_spec = proto.Field( + proto.MESSAGE, number=24, oneof="spec", message="DatabaseTableSpec", + ) + display_name = proto.Field(proto.STRING, number=3) description = proto.Field(proto.STRING, number=4) @@ -642,6 +695,28 @@ class Entry(proto.Message): proto.MESSAGE, number=7, message=timestamps.SystemTimestamps, ) + data_source = proto.Field( + proto.MESSAGE, number=20, message=gcd_data_source.DataSource, + ) + + +class DatabaseTableSpec(proto.Message): + r"""Specification that applies to a table resource. Only valid for + entries of ``TABLE`` type. + + Attributes: + type_ (google.cloud.datacatalog_v1.types.DatabaseTableSpec.TableType): + Type of this table. + """ + + class TableType(proto.Enum): + r"""Type of the table.""" + TABLE_TYPE_UNSPECIFIED = 0 + NATIVE = 1 + EXTERNAL = 2 + + type_ = proto.Field(proto.ENUM, number=1, enum=TableType,) + class EntryGroup(proto.Message): r"""EntryGroup Metadata. An EntryGroup resource represents a logical @@ -652,10 +727,10 @@ class EntryGroup(proto.Message): name (str): The resource name of the entry group in URL format. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. 
+ Note: The entry group itself and its child resources might + not be stored in the location specified in its name. display_name (str): A short name to identify the entry group, for example, "analytics data - jan 2011". Default @@ -694,8 +769,12 @@ class CreateTagTemplateRequest(proto.Message): - projects/{project_id}/locations/us-central1 tag_template_id (str): - Required. The id of the tag template to - create. + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. tag_template (google.cloud.datacatalog_v1.types.TagTemplate): Required. The tag template to create. """ @@ -730,15 +809,13 @@ class UpdateTagTemplateRequest(proto.Message): Required. The template to update. The "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The field mask specifies the parts of the template to - overwrite. - - Allowed fields: - - - ``display_name`` + Names of fields whose values to overwrite on a tag template. + Currently, only ``display_name`` can be overwritten. - If absent or empty, all of the allowed fields above will be - updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. """ tag_template = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.TagTemplate,) @@ -774,12 +851,13 @@ class CreateTagRequest(proto.Message): Attributes: parent (str): Required. The name of the resource to attach this tag to. - Tags can be attached to Entries. Example: + Tags can be attached to entries. An entry can have up to + 1000 attached tags. 
Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not actually - be stored in the location in this name. + Note: The tag and its child resources might not be stored in + the location specified in its name. tag (google.cloud.datacatalog_v1.types.Tag): Required. The tag to create. """ @@ -798,9 +876,14 @@ class UpdateTagRequest(proto.Message): Required. The updated tag. The "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only modifiable - field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the name + ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. """ tag = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.Tag,) @@ -835,11 +918,16 @@ class CreateTagTemplateFieldRequest(proto.Message): - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id} tag_template_field_id (str): - Required. The ID of the tag template field to create. Field - ids can contain letters (both uppercase and lowercase), - numbers (0-9), underscores (_) and dashes (-). Field IDs - must be at least 1 character long and at most 128 characters - long. Field IDs must also be unique within their template. + Required. The ID of the tag template field to create. + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and + lowercase), numbers (0-9), underscores (_) and dashes (-). 
+ Field IDs must be at least 1 character long and at most 128 + characters long. Field IDs must also be unique within their + template. tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): Required. The tag template field to create. """ @@ -865,21 +953,24 @@ class UpdateTagTemplateFieldRequest(proto.Message): tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): Required. The template to update. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The field mask specifies the parts of the template - to be updated. Allowed fields: + Optional. Names of fields whose values to overwrite on an + individual field of a tag template. The following fields are + modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the allowed - fields above will be updated. + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied with one + exception: when updating an enum type, the provided values + are merged with the existing values. Therefore, enum values + can only be added, existing enum values cannot be deleted or + renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values can - only be added, existing enum values cannot be deleted nor - renamed. Updating a template field from optional to required - is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. """ name = proto.Field(proto.STRING, number=1) @@ -910,6 +1001,25 @@ class RenameTagTemplateFieldRequest(proto.Message): new_tag_template_field_id = proto.Field(proto.STRING, number=2) +class RenameTagTemplateFieldEnumValueRequest(proto.Message): + r"""Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. 
+ + Attributes: + name (str): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + """ + + name = proto.Field(proto.STRING, number=1) + + new_enum_value_display_name = proto.Field(proto.STRING, number=2) + + class DeleteTagTemplateFieldRequest(proto.Message): r"""Request message for [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. diff --git a/google/cloud/datacatalog_v1/types/policytagmanager.py b/google/cloud/datacatalog_v1/types/policytagmanager.py new file mode 100644 index 00000000..56ad4d87 --- /dev/null +++ b/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.datacatalog_v1.types import timestamps +from google.protobuf import field_mask_pb2 as field_mask # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", + manifest={ + "Taxonomy", + "PolicyTag", + "CreateTaxonomyRequest", + "DeleteTaxonomyRequest", + "UpdateTaxonomyRequest", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", + "GetTaxonomyRequest", + "CreatePolicyTagRequest", + "DeletePolicyTagRequest", + "UpdatePolicyTagRequest", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", + "GetPolicyTagRequest", + }, +) + + +class Taxonomy(proto.Message): + r"""A taxonomy is a collection of hierarchical policy tags that + classify data along a common axis. For instance a "data + sensitivity" taxonomy could contain the following policy tags: + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + A "data origin" taxonomy could contain the following policy + tags: + User data + + Employee data + + Partner data + + Public data + + Attributes: + name (str): + Output only. Resource name of this taxonomy in format: + "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}". + Note that taxonomy_id's are unique and generated by Policy + Tag Manager. + display_name (str): + Required. User-defined name of this taxonomy. + It must: contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + description (str): + Optional. Description of this taxonomy. It + must: contain only unicode characters, tabs, + newlines, carriage returns and page breaks; and + be at most 2000 bytes long when encoded in + UTF-8. If not set, defaults to an empty + description. + policy_tag_count (int): + Output only. Number of policy tags contained + in this taxonomy. + taxonomy_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Output only. 
Timestamps about this taxonomy. Only + create_time and update_time are used. + activated_policy_types (Sequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + Optional. A list of policy types that are + activated for this taxonomy. If not set, + defaults to an empty list. + """ + + class PolicyType(proto.Enum): + r"""Defines policy types where the policy tags can be used for.""" + POLICY_TYPE_UNSPECIFIED = 0 + FINE_GRAINED_ACCESS_CONTROL = 1 + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + policy_tag_count = proto.Field(proto.INT32, number=4) + + taxonomy_timestamps = proto.Field( + proto.MESSAGE, number=5, message=timestamps.SystemTimestamps, + ) + + activated_policy_types = proto.RepeatedField(proto.ENUM, number=6, enum=PolicyType,) + + +class PolicyTag(proto.Message): + r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags + can be defined in a hierarchy. For example, consider the + following hierarchy: + Geolocation + + LatLong + + City + + ZipCode + Policy tag "Geolocation" contains 3 child policy tags: + "LatLong", "City", and "ZipCode". + + Attributes: + name (str): + Output only. Resource name of this policy tag in format: + "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policy_tag_id}". + Both taxonomy_ids and policy_tag_ids are unique and + generated by Policy Tag Manager. + display_name (str): + Required. User-defined name of this policy + tag. It must: be unique within the parent + taxonomy; contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + description (str): + Description of this policy tag. It must: + contain only unicode characters, tabs, newlines, + carriage returns and page breaks; and be at most + 2000 bytes long when encoded in UTF-8. 
If not
+            set, defaults to an empty
+            description.
+        parent_policy_tag (str):
+            Resource name of this policy tag's parent
+            policy tag (e.g. for the "LatLong" policy tag in
+            the example above, this field contains the
+            resource name of the "Geolocation" policy tag).
+            If empty, it means this policy tag is a top
+            level policy tag (e.g. this field is empty for
+            the "Geolocation" policy tag in the example
+            above). If not set, defaults to an empty string.
+        child_policy_tags (Sequence[str]):
+            Output only. Resource names of child policy
+            tags of this policy tag.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    display_name = proto.Field(proto.STRING, number=2)
+
+    description = proto.Field(proto.STRING, number=3)
+
+    parent_policy_tag = proto.Field(proto.STRING, number=4)
+
+    child_policy_tags = proto.RepeatedField(proto.STRING, number=5)
+
+
+class CreateTaxonomyRequest(proto.Message):
+    r"""Request message for
+    [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy].
+
+    Attributes:
+        parent (str):
+            Required. Resource name of the project that
+            the taxonomy will belong to.
+        taxonomy (google.cloud.datacatalog_v1.types.Taxonomy):
+            The taxonomy to be created.
+    """
+
+    parent = proto.Field(proto.STRING, number=1)
+
+    taxonomy = proto.Field(proto.MESSAGE, number=2, message="Taxonomy",)
+
+
+class DeleteTaxonomyRequest(proto.Message):
+    r"""Request message for
+    [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy].
+
+    Attributes:
+        name (str):
+            Required. Resource name of the taxonomy to be
+            deleted. All policy tags in this taxonomy will
+            also be deleted.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+
+class UpdateTaxonomyRequest(proto.Message):
+    r"""Request message for
+    [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy].
+
+    Attributes:
+        taxonomy (google.cloud.datacatalog_v1.types.Taxonomy):
+            The taxonomy to update. 
Only description, display_name, and + activated policy types can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + taxonomy = proto.Field(proto.MESSAGE, number=1, message="Taxonomy",) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class ListTaxonomiesRequest(proto.Message): + r"""Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project to + list the taxonomies of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous list + request, if any. If not set, defaults to an empty string. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTaxonomiesResponse(proto.Message): + r"""Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Taxonomies that the project contains. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message="Taxonomy",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetTaxonomyRequest(proto.Message): + r"""Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. 
+ + Attributes: + name (str): + Required. Resource name of the requested + taxonomy. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreatePolicyTagRequest(proto.Message): + r"""Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy that + the policy tag will belong to. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to be created. + """ + + parent = proto.Field(proto.STRING, number=1) + + policy_tag = proto.Field(proto.MESSAGE, number=2, message="PolicyTag",) + + +class DeletePolicyTagRequest(proto.Message): + r"""Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + + Attributes: + name (str): + Required. Resource name of the policy tag to + be deleted. All of its descendant policy tags + will also be deleted. + """ + + name = proto.Field(proto.STRING, number=1) + + +class UpdatePolicyTagRequest(proto.Message): + r"""Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + + Attributes: + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. Only display_name, + description and parent_policy_tag can be updated and thus + can be listed in the mask. If update_mask is not provided, + all allowed fields (i.e. display_name, description and + parent) will be updated. For more information including the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. 
+ """ + + policy_tag = proto.Field(proto.MESSAGE, number=1, message="PolicyTag",) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class ListPolicyTagsRequest(proto.Message): + r"""Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy to + list the policy tags of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous List + request, if any. If not set, defaults to an empty string. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListPolicyTagsResponse(proto.Message): + r"""Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Attributes: + policy_tags (Sequence[google.cloud.datacatalog_v1.types.PolicyTag]): + The policy tags that are in the requested + taxonomy. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message="PolicyTag",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetPolicyTagRequest(proto.Message): + r"""Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + + Attributes: + name (str): + Required. Resource name of the requested + policy tag. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py b/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py new file mode 100644 index 00000000..43677982 --- /dev/null +++ b/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datacatalog_v1.types import policytagmanager + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", + manifest={ + "SerializedTaxonomy", + "SerializedPolicyTag", + "ImportTaxonomiesRequest", + "InlineSource", + "CrossRegionalSource", + "ImportTaxonomiesResponse", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", + }, +) + + +class SerializedTaxonomy(proto.Message): + r"""Message representing a taxonomy, including its policy tags in + hierarchy, as a nested proto. Used for taxonomy replacement, + import, and export. + + Attributes: + display_name (str): + Required. Display name of the taxonomy. At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized taxonomy. At + most 2000 bytes when encoded in UTF-8. If not + set, defaults to an empty description. 
+ policy_tags (Sequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Top level policy tags associated with the + taxonomy, if any. + activated_policy_types (Sequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + A list of policy types that are activated per + taxonomy. + """ + + display_name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=2) + + policy_tags = proto.RepeatedField( + proto.MESSAGE, number=3, message="SerializedPolicyTag", + ) + + activated_policy_types = proto.RepeatedField( + proto.ENUM, number=4, enum=policytagmanager.Taxonomy.PolicyType, + ) + + +class SerializedPolicyTag(proto.Message): + r"""Message representing one policy tag, including all its + descendant policy tags, as a nested proto. + + Attributes: + policy_tag (str): + Resource name of the policy tag. + This field will be ignored when calling + ImportTaxonomies. + display_name (str): + Required. Display name of the policy tag. At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized policy tag. The + length of the description is limited to 2000 + bytes when encoded in UTF-8. If not set, + defaults to an empty description. + child_policy_tags (Sequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Children of the policy tag, if any. + """ + + policy_tag = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + child_policy_tags = proto.RepeatedField( + proto.MESSAGE, number=4, message="SerializedPolicyTag", + ) + + +class ImportTaxonomiesRequest(proto.Message): + r"""Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of project that the + imported taxonomies will belong to. 
+ inline_source (google.cloud.datacatalog_v1.types.InlineSource): + Inline source used for taxonomies import. + cross_regional_source (google.cloud.datacatalog_v1.types.CrossRegionalSource): + Cross-regional source taxonomy to be + imported. + """ + + parent = proto.Field(proto.STRING, number=1) + + inline_source = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="InlineSource", + ) + + cross_regional_source = proto.Field( + proto.MESSAGE, number=3, oneof="source", message="CrossRegionalSource", + ) + + +class InlineSource(proto.Message): + r"""Inline source containing taxonomies to import. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + Required. Taxonomies to be imported. + """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message="SerializedTaxonomy", + ) + + +class CrossRegionalSource(proto.Message): + r"""Cross-regional source used to import an existing taxonomy + into a different region. + + Attributes: + taxonomy (str): + Required. The resource name of the source + taxonomy to be imported. + """ + + taxonomy = proto.Field(proto.STRING, number=1) + + +class ImportTaxonomiesResponse(proto.Message): + r"""Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Taxonomies that were imported. + """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message=policytagmanager.Taxonomy, + ) + + +class ExportTaxonomiesRequest(proto.Message): + r"""Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project that + the exported taxonomies belong to. + taxonomies (Sequence[str]): + Required. Resource names of the taxonomies to + be exported. 
+ serialized_taxonomies (bool): + Export taxonomies as serialized taxonomies, + which contain all the policy tags as nested + protos. + """ + + parent = proto.Field(proto.STRING, number=1) + + taxonomies = proto.RepeatedField(proto.STRING, number=2) + + serialized_taxonomies = proto.Field(proto.BOOL, number=3, oneof="destination") + + +class ExportTaxonomiesResponse(proto.Message): + r"""Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + List of taxonomies and policy tags as nested + protos. + """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message="SerializedTaxonomy", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/schema.py b/google/cloud/datacatalog_v1/types/schema.py index debec332..f8518289 100644 --- a/google/cloud/datacatalog_v1/types/schema.py +++ b/google/cloud/datacatalog_v1/types/schema.py @@ -28,8 +28,10 @@ class Schema(proto.Message): Attributes: columns (Sequence[google.cloud.datacatalog_v1.types.ColumnSchema]): - Required. Schema of columns. A maximum of - 10,000 columns and sub-columns can be specified. + The unified GoogleSQL-like schema of columns. + The overall maximum number of columns and nested + columns is 10,000. The maximum nested depth is + 15 levels. """ columns = proto.RepeatedField(proto.MESSAGE, number=2, message="ColumnSchema",) @@ -42,16 +44,23 @@ class ColumnSchema(proto.Message): Attributes: column (str): Required. Name of the column. + Must be a UTF-8 string without dots (.). + The maximum size is 64 bytes. type_ (str): Required. Type of the column. + Must be a UTF-8 string with the maximum size of + 128 bytes. description (str): Optional. Description of the column. Default value is an empty string. + The description must be a UTF-8 string with the + maximum size of 2000 bytes. 
mode (str): - Optional. A column's mode indicates whether the values in - this column are required, nullable, etc. Only ``NULLABLE``, - ``REQUIRED`` and ``REPEATED`` are supported. Default mode is - ``NULLABLE``. + Optional. A column's mode indicates if values in this column + are required, nullable, or repeated. + + Only ``NULLABLE``, ``REQUIRED``, and ``REPEATED`` values are + supported. Default mode is ``NULLABLE``. subcolumns (Sequence[google.cloud.datacatalog_v1.types.ColumnSchema]): Optional. Schema of sub-columns. A column can have zero or more sub-columns. diff --git a/google/cloud/datacatalog_v1/types/search.py b/google/cloud/datacatalog_v1/types/search.py index cdcb129f..23d8277a 100644 --- a/google/cloud/datacatalog_v1/types/search.py +++ b/google/cloud/datacatalog_v1/types/search.py @@ -19,6 +19,7 @@ from google.cloud.datacatalog_v1.types import common +from google.protobuf import timestamp_pb2 as timestamp # type: ignore __protobuf__ = proto.module( @@ -66,6 +67,9 @@ class SearchCatalogResult(proto.Message): Example: - ``//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId`` + modify_time (google.protobuf.timestamp_pb2.Timestamp): + Last-modified timestamp of the entry from the + managing system. integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem): Output only. This field indicates the entry's source system that Data Catalog integrates with, @@ -74,6 +78,15 @@ class SearchCatalogResult(proto.Message): This field indicates the entry's source system that Data Catalog does not integrate with. + fully_qualified_name (str): + Fully Qualified Name of the resource. 
There are two main + forms of FQNs: {system}:{project}.{dot-separated path to + resource} for non-regionalized resources + {system}:{project}.{location id}.{dot-separated path to + resource} for regionalized resources Examples: + + - dataproc_metastore:projectId.locationId.instanceId.databaseId.tableId + - bigquery:table.project_id.dataset_id.table_id """ search_result_type = proto.Field(proto.ENUM, number=1, enum="SearchResultType",) @@ -84,11 +97,15 @@ class SearchCatalogResult(proto.Message): linked_resource = proto.Field(proto.STRING, number=4) + modify_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + integrated_system = proto.Field( proto.ENUM, number=8, oneof="system", enum=common.IntegratedSystem, ) user_specified_system = proto.Field(proto.STRING, number=9, oneof="system") + fully_qualified_name = proto.Field(proto.STRING, number=10) + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/table_spec.py b/google/cloud/datacatalog_v1/types/table_spec.py index 8404dba2..233c0ec2 100644 --- a/google/cloud/datacatalog_v1/types/table_spec.py +++ b/google/cloud/datacatalog_v1/types/table_spec.py @@ -35,6 +35,7 @@ class TableSourceType(proto.Enum): TABLE_SOURCE_TYPE_UNSPECIFIED = 0 BIGQUERY_VIEW = 2 BIGQUERY_TABLE = 5 + BIGQUERY_MATERIALIZED_VIEW = 7 class BigQueryTableSpec(proto.Message): diff --git a/google/cloud/datacatalog_v1/types/tags.py b/google/cloud/datacatalog_v1/types/tags.py index e85c5036..80158ce8 100644 --- a/google/cloud/datacatalog_v1/types/tags.py +++ b/google/cloud/datacatalog_v1/types/tags.py @@ -39,30 +39,29 @@ class Tag(proto.Message): name (str): The resource name of the tag in URL format. Example: - - projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}`` - where ``tag_id`` is a system-generated identifier. 
Note that - this Tag may not actually be stored in the location in this - name. + where ``tag_id`` is a system-generated identifier. + + Note: The tag itself might not be stored in the location + specified in its name. template (str): Required. The resource name of the tag template that this tag uses. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}`` This field cannot be modified after creation. template_display_name (str): Output only. The display name of the tag template. column (str): - Resources like Entry can have schemas associated with them. + Resources like entry can have schemas associated with them. This scope allows users to attach tags to an individual column based on that schema. - For attaching a tag to a nested column, use ``.`` to - separate the column names. Example: - - - ``outer_column.inner_column`` + To attach a tag to a nested column, separate column names + with a dot (``.``). Example: ``column.nested_column``. fields (Sequence[google.cloud.datacatalog_v1.types.Tag.FieldsEntry]): Required. This maps the ID of a tag field to the value of and additional information about @@ -94,7 +93,8 @@ class TagField(proto.Message): type. string_value (str): Holds the value for a tag field with string - type. + type. The maximum length is 2000 UTF-8 + characters. bool_value (bool): Holds the value for a tag field with boolean type. @@ -158,13 +158,16 @@ class TagTemplate(proto.Message): The resource name of the tag template in URL format. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}`` - Note that this TagTemplate and its child resources may not - actually be stored in the location in this name. + Note: The tag template itself and its child resources might + not be stored in the location specified in its name. 
display_name (str): - The display name for this template. Defaults - to an empty string. + Display name for this template. Defaults to an empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. fields (Sequence[google.cloud.datacatalog_v1.types.TagTemplate.FieldsEntry]): Required. Map of tag template field IDs to the settings for the field. This map is an exhaustive list of the allowed @@ -195,19 +198,30 @@ class TagTemplateField(proto.Message): Output only. The resource name of the tag template field in URL format. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}`` + + Note: The ``TagTemplateField`` itself might not be stored in + the location specified in its name. - Note that this TagTemplateField may not actually be stored - in the location in this name. + The name must contain only letters (a-z, A-Z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum length is 64 characters. display_name (str): - The display name for this field. Defaults to - an empty string. + The display name for this field. Defaults to an empty + string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. type_ (google.cloud.datacatalog_v1.types.FieldType): Required. The type of value this tag field can contain. is_required (bool): Whether this is a required field. Defaults to false. + description (str): + The description for this field. Defaults to + an empty string. order (int): The order of this field with respect to other fields in this tag template. 
For example, a @@ -225,6 +239,8 @@ class TagTemplateField(proto.Message): is_required = proto.Field(proto.BOOL, number=3) + description = proto.Field(proto.STRING, number=4) + order = proto.Field(proto.INT32, number=5) @@ -252,16 +268,17 @@ class EnumType(proto.Message): Attributes: allowed_values (Sequence[google.cloud.datacatalog_v1.types.FieldType.EnumType.EnumValue]): - Required on create; optional on update. The - set of allowed values for this enum. This set - must not be empty, the display names of the - values in this set must not be empty and the - display names of the values must be case- - insensitively unique within this set. Currently, - enum values can only be added to the list of - allowed values. Deletion and renaming of enum - values are not supported. Can have up to 500 - allowed values. + The set of allowed values for this enum. + + This set must not be empty and can include up to 100 allowed + values. The display names of the values in this set must not + be empty and must be case-insensitively unique within this + set. + + The order of items in this set is preserved. This field can + be used to create, remove and reorder enum values. To rename + enum values, use the ``RenameTagTemplateFieldEnumValue`` + method. """ class EnumValue(proto.Message): @@ -269,8 +286,12 @@ class EnumValue(proto.Message): Attributes: display_name (str): - Required. The display name of the enum value. - Must not be an empty string. + Required. The display name of the enum value. Must not be an + empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. 
""" display_name = proto.Field(proto.STRING, number=1) diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py index 04befa38..d4ee3ca3 100644 --- a/scripts/fixup_datacatalog_v1_keywords.py +++ b/scripts/fixup_datacatalog_v1_keywords.py @@ -43,31 +43,44 @@ class datacatalogCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_entry': ('parent', 'entry_id', 'entry', ), 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), + 'create_policy_tag': ('parent', 'policy_tag', ), 'create_tag': ('parent', 'tag', ), 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), + 'create_taxonomy': ('parent', 'taxonomy', ), 'delete_entry': ('name', ), 'delete_entry_group': ('name', 'force', ), + 'delete_policy_tag': ('name', ), 'delete_tag': ('name', ), 'delete_tag_template': ('name', 'force', ), 'delete_tag_template_field': ('name', 'force', ), + 'delete_taxonomy': ('name', ), + 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), 'get_entry': ('name', ), 'get_entry_group': ('name', 'read_mask', ), 'get_iam_policy': ('resource', 'options', ), + 'get_policy_tag': ('name', ), 'get_tag_template': ('name', ), + 'get_taxonomy': ('name', ), + 'import_taxonomies': ('parent', 'inline_source', 'cross_regional_source', ), 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), 'list_entry_groups': ('parent', 'page_size', 'page_token', ), + 'list_policy_tags': ('parent', 'page_size', 'page_token', ), 'list_tags': ('parent', 'page_size', 'page_token', ), - 'lookup_entry': ('linked_resource', 'sql_resource', ), + 'list_taxonomies': ('parent', 'page_size', 'page_token', ), + 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', ), 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), + 'rename_tag_template_field_enum_value': 
('name', 'new_enum_value_display_name', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), 'set_iam_policy': ('resource', 'policy', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_entry': ('entry', 'update_mask', ), 'update_entry_group': ('entry_group', 'update_mask', ), + 'update_policy_tag': ('policy_tag', 'update_mask', ), 'update_tag': ('tag', 'update_mask', ), 'update_tag_template': ('tag_template', 'update_mask', ), 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), + 'update_taxonomy': ('taxonomy', 'update_mask', ), } diff --git a/synth.metadata b/synth.metadata index 7e914094..8dd8248e 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "15c5e21948ff6fbe41f91bdf04f6252f91a12d59", - "internalRef": "364894175" + "sha": "a1ab4d44db02d59ff58810c6d4182d84e4b9abaa", + "internalRef": "365620142" } }, { @@ -111,6 +111,8 @@ "docs/_templates/layout.html", "docs/conf.py", "docs/datacatalog_v1/data_catalog.rst", + "docs/datacatalog_v1/policy_tag_manager.rst", + "docs/datacatalog_v1/policy_tag_manager_serialization.rst", "docs/datacatalog_v1/services.rst", "docs/datacatalog_v1/types.rst", "docs/datacatalog_v1beta1/data_catalog.rst", @@ -123,8 +125,11 @@ "google/cloud/datacatalog/py.typed", "google/cloud/datacatalog_v1/__init__.py", "google/cloud/datacatalog_v1/proto/common.proto", + "google/cloud/datacatalog_v1/proto/data_source.proto", "google/cloud/datacatalog_v1/proto/datacatalog.proto", "google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto", + "google/cloud/datacatalog_v1/proto/policytagmanager.proto", + "google/cloud/datacatalog_v1/proto/policytagmanagerserialization.proto", "google/cloud/datacatalog_v1/proto/schema.proto", "google/cloud/datacatalog_v1/proto/search.proto", "google/cloud/datacatalog_v1/proto/table_spec.proto", @@ -140,10 +145,28 @@ 
"google/cloud/datacatalog_v1/services/data_catalog/transports/base.py", "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py", "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/client.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py", + "google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py", "google/cloud/datacatalog_v1/types/__init__.py", "google/cloud/datacatalog_v1/types/common.py", + "google/cloud/datacatalog_v1/types/data_source.py", "google/cloud/datacatalog_v1/types/datacatalog.py", "google/cloud/datacatalog_v1/types/gcs_fileset_spec.py", + "google/cloud/datacatalog_v1/types/policytagmanager.py", + "google/cloud/datacatalog_v1/types/policytagmanagerserialization.py", "google/cloud/datacatalog_v1/types/schema.py", "google/cloud/datacatalog_v1/types/search.py", 
"google/cloud/datacatalog_v1/types/table_spec.py", @@ -215,6 +238,8 @@ "testing/.gitignore", "tests/unit/gapic/datacatalog_v1/__init__.py", "tests/unit/gapic/datacatalog_v1/test_data_catalog.py", + "tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py", + "tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py", "tests/unit/gapic/datacatalog_v1beta1/__init__.py", "tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py", "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py", diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 301b8027..f4966de4 100644 --- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -37,6 +37,7 @@ from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.services.data_catalog import transports from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -2123,6 +2124,7 @@ def test_create_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2130,6 +2132,9 @@ def test_create_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) response = client.create_entry(request) @@ -2148,6 +2153,8 @@ def test_create_entry( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == 
"fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -2192,6 +2199,7 @@ async def test_create_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) @@ -2212,6 +2220,8 @@ async def test_create_entry_async( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -2375,6 +2385,7 @@ def test_update_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2382,6 +2393,9 @@ def test_update_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) response = client.update_entry(request) @@ -2400,6 +2414,8 @@ def test_update_entry( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -2444,6 +2460,7 @@ async def test_update_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) @@ -2464,6 +2481,8 @@ async def test_update_entry_async( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == 
"fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -2813,6 +2832,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2820,6 +2840,9 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) response = client.get_entry(request) @@ -2838,6 +2861,8 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -2882,6 +2907,7 @@ async def test_get_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) @@ -2902,6 +2928,8 @@ async def test_get_entry_async( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -3043,6 +3071,7 @@ def test_lookup_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", 
type_=datacatalog.EntryType.TABLE, @@ -3050,6 +3079,9 @@ def test_lookup_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) response = client.lookup_entry(request) @@ -3068,6 +3100,8 @@ def test_lookup_entry( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -3112,6 +3146,7 @@ async def test_lookup_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) @@ -3132,6 +3167,8 @@ async def test_lookup_entry_async( assert response.linked_resource == "linked_resource_value" + assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.display_name == "display_name_value" assert response.description == "description_value" @@ -4422,6 +4459,7 @@ def test_create_tag_template_field( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) @@ -4443,6 +4481,8 @@ def test_create_tag_template_field( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -4491,6 +4531,7 @@ async def test_create_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) @@ -4512,6 +4553,8 @@ async def test_create_tag_template_field_async( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -4688,6 +4731,7 @@ def test_update_tag_template_field( name="name_value", 
display_name="display_name_value", is_required=True, + description="description_value", order=540, ) @@ -4709,6 +4753,8 @@ def test_update_tag_template_field( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -4757,6 +4803,7 @@ async def test_update_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) @@ -4778,6 +4825,8 @@ async def test_update_tag_template_field_async( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -4954,6 +5003,7 @@ def test_rename_tag_template_field( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) @@ -4975,6 +5025,8 @@ def test_rename_tag_template_field( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -5023,6 +5075,7 @@ async def test_rename_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) @@ -5044,6 +5097,8 @@ async def test_rename_tag_template_field_async( assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 @@ -5192,6 +5247,275 @@ async def test_rename_tag_template_field_flattened_error_async(): ) +def test_rename_tag_template_field_enum_value( + transport: str = "grpc", + request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest, +): + client = DataCatalogClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name="name_value", + display_name="display_name_value", + is_required=True, + description="description_value", + order=540, + ) + + response = client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, tags.TagTemplateField) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.is_required is True + + assert response.description == "description_value" + + assert response.order == 540 + + +def test_rename_tag_template_field_enum_value_from_dict(): + test_rename_tag_template_field_enum_value(request_type=dict) + + +def test_rename_tag_template_field_enum_value_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + client.rename_tag_template_field_enum_value() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest, +): + client = DataCatalogAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField( + name="name_value", + display_name="display_name_value", + is_required=True, + description="description_value", + order=540, + ) + ) + + response = await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tags.TagTemplateField) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.is_required is True + + assert response.description == "description_value" + + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async_from_dict(): + await test_rename_tag_template_field_enum_value_async(request_type=dict) + + +def test_rename_tag_template_field_enum_value_field_headers(): + client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + call.return_value = tags.TagTemplateField() + + client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_field_headers_async(): + client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField() + ) + + await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_rename_tag_template_field_enum_value_flattened(): + client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field_enum_value( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert ( + args[0].new_enum_value_display_name == "new_enum_value_display_name_value" + ) + + +def test_rename_tag_template_field_enum_value_flattened_error(): + client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_async(): + client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_tag_template_field_enum_value( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert ( + args[0].new_enum_value_display_name == "new_enum_value_display_name_value" + ) + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_error_async(): + client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + def test_delete_tag_template_field( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateFieldRequest ): @@ -7105,6 +7429,7 @@ def test_data_catalog_base_transport(): "create_tag_template_field", "update_tag_template_field", "rename_tag_template_field", + "rename_tag_template_field_enum_value", "delete_tag_template_field", "create_tag", "update_tag", @@ -7491,8 +7816,43 @@ def test_parse_tag_template_field_path(): assert expected == actual +def test_tag_template_field_enum_value_path(): + project = "whelk" + location = "octopus" + tag_template = "oyster" + tag_template_field_id = "nudibranch" + enum_value_display_name = "cuttlefish" + + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format( + project=project, + location=location, + tag_template=tag_template, + tag_template_field_id=tag_template_field_id, + enum_value_display_name=enum_value_display_name, + ) + actual = DataCatalogClient.tag_template_field_enum_value_path( + project, location, tag_template, tag_template_field_id, enum_value_display_name + ) + assert expected == actual + + +def test_parse_tag_template_field_enum_value_path(): + expected = { + "project": "mussel", + "location": "winkle", + "tag_template": "nautilus", + "tag_template_field_id": "scallop", + "enum_value_display_name": "abalone", + } + path = DataCatalogClient.tag_template_field_enum_value_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -7503,7 +7863,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "clam", } path = DataCatalogClient.common_billing_account_path(**expected) @@ -7513,7 +7873,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "whelk" expected = "folders/{folder}".format(folder=folder,) actual = DataCatalogClient.common_folder_path(folder) @@ -7522,7 +7882,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "octopus", } path = DataCatalogClient.common_folder_path(**expected) @@ -7532,7 +7892,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "oyster" expected = "organizations/{organization}".format(organization=organization,) actual = DataCatalogClient.common_organization_path(organization) @@ -7541,7 +7901,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nudibranch", } path = DataCatalogClient.common_organization_path(**expected) @@ -7551,7 +7911,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "cuttlefish" expected = "projects/{project}".format(project=project,) actual = DataCatalogClient.common_project_path(project) @@ -7560,7 +7920,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "mussel", } path = DataCatalogClient.common_project_path(**expected) 
@@ -7570,8 +7930,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -7582,8 +7942,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "scallop", + "location": "abalone", } path = DataCatalogClient.common_location_path(**expected) diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py new file mode 100644 index 00000000..66033d53 --- /dev/null +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -0,0 +1,4195 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager import ( + PolicyTagManagerAsyncClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager import ( + PolicyTagManagerClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.services.policy_tag_manager import transports +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.type import expr_pb2 as expr # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] +) +def test_policy_tag_manager_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] +) +def test_policy_tag_manager_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + 
factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_client_get_transport_class(): + transport = PolicyTagManagerClient.get_transport_class() + available_transports = [ + transports.PolicyTagManagerGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PolicyTagManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerClient), +) +@mock.patch.object( + PolicyTagManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerAsyncClient), +) +def test_policy_tag_manager_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PolicyTagManagerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(PolicyTagManagerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "false", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerClient), +) +@mock.patch.object( + PolicyTagManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_policy_tag_manager_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_taxonomy( + transport: str = "grpc", request_type=policytagmanager.CreateTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + + response = client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_create_taxonomy_from_dict(): + test_create_taxonomy(request_type=dict) + + +def test_create_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + client.create_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_create_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.CreateTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + + response = await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_create_taxonomy_async_from_dict(): + await test_create_taxonomy_async(request_type=dict) + + +def test_create_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + + client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + + await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_taxonomy( + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +def test_create_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_taxonomy( + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +def test_delete_taxonomy( + transport: str = "grpc", request_type=policytagmanager.DeleteTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_taxonomy_from_dict(): + test_delete_taxonomy(request_type=dict) + + +def test_delete_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + client.delete_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.DeleteTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async_from_dict(): + await test_delete_taxonomy_async(request_type=dict) + + +def test_delete_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + call.return_value = None + + client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), name="name_value", + ) + + +def test_update_taxonomy( + transport: str = "grpc", request_type=policytagmanager.UpdateTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + + response = client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_update_taxonomy_from_dict(): + test_update_taxonomy(request_type=dict) + + +def test_update_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + client.update_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_update_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.UpdateTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + + response = await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_update_taxonomy_async_from_dict(): + await test_update_taxonomy_async(request_type=dict) + + +def test_update_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + request.taxonomy.name = "taxonomy.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + + client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "taxonomy.name=taxonomy.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + request.taxonomy.name = "taxonomy.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + + await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "taxonomy.name=taxonomy.name/value",) in kw[ + "metadata" + ] + + +def test_update_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_taxonomy(taxonomy=policytagmanager.Taxonomy(name="name_value"),) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +def test_update_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +def test_list_taxonomies( + transport: str = "grpc", request_type=policytagmanager.ListTaxonomiesRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListTaxonomiesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_taxonomies_from_dict(): + test_list_taxonomies(request_type=dict) + + +def test_list_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + client.list_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_list_taxonomies_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListTaxonomiesRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_from_dict(): + await test_list_taxonomies_async(request_type=dict) + + +def test_list_taxonomies_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + call.return_value = policytagmanager.ListTaxonomiesResponse() + + client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_taxonomies_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse() + ) + + await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_taxonomies_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_taxonomies(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_taxonomies_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_taxonomies(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), parent="parent_value", + ) + + +def test_list_taxonomies_pager(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_taxonomies(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) for i in results) + + +def test_list_taxonomies_pages(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + pages = list(client.list_taxonomies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pager(): + client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + async_pager = await client.list_taxonomies(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pages(): + client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_taxonomies(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_taxonomy( + transport: str = "grpc", request_type=policytagmanager.GetTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + + response = client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ + assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_get_taxonomy_from_dict(): + test_get_taxonomy(request_type=dict) + + +def test_get_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + client.get_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_get_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + + response = await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.policy_tag_count == 1715 + + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_get_taxonomy_async_from_dict(): + await test_get_taxonomy_async(request_type=dict) + + +def test_get_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + + client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + + await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), name="name_value", + ) + + +def test_create_policy_tag( + transport: str = "grpc", request_type=policytagmanager.CreatePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + + response = client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_create_policy_tag_from_dict(): + test_create_policy_tag(request_type=dict) + + +def test_create_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + client.create_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_create_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.CreatePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + + response = await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_create_policy_tag_async_from_dict(): + await test_create_policy_tag_async(request_type=dict) + + +def test_create_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + call.return_value = policytagmanager.PolicyTag() + + client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + + await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_policy_tag( + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +def test_create_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_policy_tag( + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +def test_delete_policy_tag( + transport: str = "grpc", request_type=policytagmanager.DeletePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_policy_tag_from_dict(): + test_delete_policy_tag(request_type=dict) + + +def test_delete_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + client.delete_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.DeletePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async_from_dict(): + await test_delete_policy_tag_async(request_type=dict) + + +def test_delete_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + call.return_value = None + + client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), name="name_value", + ) + + +def test_update_policy_tag( + transport: str = "grpc", request_type=policytagmanager.UpdatePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + + response = client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_update_policy_tag_from_dict(): + test_update_policy_tag(request_type=dict) + + +def test_update_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + client.update_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_update_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.UpdatePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + + response = await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_update_policy_tag_async_from_dict(): + await test_update_policy_tag_async(request_type=dict) + + +def test_update_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + request.policy_tag.name = "policy_tag.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + call.return_value = policytagmanager.PolicyTag() + + client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "policy_tag.name=policy_tag.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + request.policy_tag.name = "policy_tag.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + + await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "policy_tag.name=policy_tag.name/value",) in kw[ + "metadata" + ] + + +def test_update_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +def test_update_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +def test_list_policy_tags( + transport: str = "grpc", request_type=policytagmanager.ListPolicyTagsRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListPolicyTagsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_policy_tags_from_dict(): + test_list_policy_tags(request_type=dict) + + +def test_list_policy_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + client.list_policy_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + +@pytest.mark.asyncio +async def test_list_policy_tags_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListPolicyTagsRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_from_dict(): + await test_list_policy_tags_async(request_type=dict) + + +def test_list_policy_tags_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanager.ListPolicyTagsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + call.return_value = policytagmanager.ListPolicyTagsResponse() + + client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_policy_tags_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse() + ) + + await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_policy_tags_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_policy_tags(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_policy_tags_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_policy_tags( + policytagmanager.ListPolicyTagsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_policy_tags_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_policy_tags(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_policy_tags_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_policy_tags( + policytagmanager.ListPolicyTagsRequest(), parent="parent_value", + ) + + +def test_list_policy_tags_pager(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token="abc", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], next_page_token="def", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_policy_tags(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.PolicyTag) for i in results) + + +def test_list_policy_tags_pages(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token="abc", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], next_page_token="def", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = list(client.list_policy_tags(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_pager(): + client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token="abc", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], next_page_token="def", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_policy_tags(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.PolicyTag) for i in responses) + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_pages(): + client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token="abc", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], next_page_token="def", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_policy_tags(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_policy_tag( + transport: str = "grpc", request_type=policytagmanager.GetPolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + + response = client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ + assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_get_policy_tag_from_dict(): + test_get_policy_tag(request_type=dict) + + +def test_get_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + client.get_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetPolicyTagRequest() + + +@pytest.mark.asyncio +async def test_get_policy_tag_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetPolicyTagRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + + response = await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.parent_policy_tag == "parent_policy_tag_value" + + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_get_policy_tag_async_from_dict(): + await test_get_policy_tag_async(request_type=dict) + + +def test_get_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + call.return_value = policytagmanager.PolicyTag() + + client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + + await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), name="name_value", + ) + + +def test_get_iam_policy( + transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_set_iam_policy( + transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_test_iam_permissions( + transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest +): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest +): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PolicyTagManagerGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.PolicyTagManagerGrpcTransport,) + + +def test_policy_tag_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_policy_tag_manager_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_taxonomy", + "delete_taxonomy", + "update_taxonomy", + "list_taxonomies", + "get_taxonomy", + "create_policy_tag", + "delete_policy_tag", + "update_policy_tag", + "list_policy_tags", + "get_policy_tag", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_policy_tag_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_policy_tag_manager_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.PolicyTagManagerGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_policy_tag_manager_host_no_port(): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_host_with_port(): + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com:8000" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:8000" + + +def test_policy_tag_manager_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated 
arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_policy_tag_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + policy_tag = "octopus" + + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( + project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, + ) + actual = PolicyTagManagerClient.policy_tag_path( + project, location, taxonomy, policy_tag + ) + assert expected == actual + + +def test_parse_policy_tag_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "taxonomy": "cuttlefish", + "policy_tag": "mussel", + } + path = 
PolicyTagManagerClient.policy_tag_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_policy_tag_path(path) + assert expected == actual + + +def test_taxonomy_path(): + project = "winkle" + location = "nautilus" + taxonomy = "scallop" + + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "abalone", + "location": "squid", + "taxonomy": "clam", + } + path = PolicyTagManagerClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_taxonomy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PolicyTagManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PolicyTagManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PolicyTagManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PolicyTagManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PolicyTagManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PolicyTagManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py new file mode 100644 index 00000000..b11cda3a --- /dev/null +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -0,0 +1,1297 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationAsyncClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + transports, +) +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.oauth2 import service_account + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", + [PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient,], +) +def test_policy_tag_manager_serialization_client_from_service_account_info( + client_class, +): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", + [PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient,], +) +def 
test_policy_tag_manager_serialization_client_from_service_account_file( + client_class, +): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_serialization_client_get_transport_class(): + transport = PolicyTagManagerSerializationClient.get_transport_class() + available_transports = [ + transports.PolicyTagManagerSerializationGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerSerializationClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerSerializationGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PolicyTagManagerSerializationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationClient), +) +@mock.patch.object( + PolicyTagManagerSerializationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationAsyncClient), +) +def test_policy_tag_manager_serialization_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object( + PolicyTagManagerSerializationClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + PolicyTagManagerSerializationClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "false", + ), + ( + 
PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerSerializationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationClient), +) +@mock.patch.object( + PolicyTagManagerSerializationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_serialization_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_serialization_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_serialization_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_policy_tag_manager_serialization_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerSerializationClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + 
client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_import_taxonomies( + transport: str = "grpc", + request_type=policytagmanagerserialization.ImportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + + response = client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +def test_import_taxonomies_from_dict(): + test_import_taxonomies(request_type=dict) + + +def test_import_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + client.import_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_import_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ImportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ImportTaxonomiesResponse() + ) + + response = await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_import_taxonomies_async_from_dict(): + await test_import_taxonomies_async(request_type=dict) + + +def test_import_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanagerserialization.ImportTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + + client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ImportTaxonomiesResponse() + ) + + await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_export_taxonomies( + transport: str = "grpc", + request_type=policytagmanagerserialization.ExportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + + response = client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +def test_export_taxonomies_from_dict(): + test_export_taxonomies(request_type=dict) + + +def test_export_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + client.export_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_export_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ExportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ExportTaxonomiesResponse() + ) + + response = await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_export_taxonomies_async_from_dict(): + await test_export_taxonomies_async(request_type=dict) + + +def test_export_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanagerserialization.ExportTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + + client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ExportTaxonomiesResponse() + ) + + await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerSerializationClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, transports.PolicyTagManagerSerializationGrpcTransport, + ) + + +def test_policy_tag_manager_serialization_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerSerializationTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_policy_tag_manager_serialization_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerSerializationTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "import_taxonomies", + "export_taxonomies", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_serialization_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_serialization_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_policy_tag_manager_serialization_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.PolicyTagManagerSerializationGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_policy_tag_manager_serialization_host_no_port(): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_serialization_host_with_port(): + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com:8000" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:8000" + + +def test_policy_tag_manager_serialization_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == 
mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_taxonomy_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + actual = PolicyTagManagerSerializationClient.taxonomy_path( + project, location, taxonomy + ) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", + } + path = 
PolicyTagManagerSerializationClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PolicyTagManagerSerializationClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerSerializationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = PolicyTagManagerSerializationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerSerializationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = PolicyTagManagerSerializationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerSerializationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = PolicyTagManagerSerializationClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerSerializationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = PolicyTagManagerSerializationClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerSerializationClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) From a6db7be3ea98759041c0b6c69b3d900912d64d3d Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:32:44 -0700 Subject: [PATCH 04/13] feat: Add toll pass for Chicago, Massachusetts Turnpike, and San Francisco. 
PiperOrigin-RevId: 365836845 Source-Author: Google APIs Source-Date: Tue Mar 30 10:09:43 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 8586c62be9ba602c65d727485c828ed53ce11960 Source-Link: https://github.com/googleapis/googleapis/commit/8586c62be9ba602c65d727485c828ed53ce11960 --- google/cloud/datacatalog_v1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 7f2e8be3..28e18503 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -147,7 +147,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "PolicyTagManagerClient", ) diff --git a/synth.metadata b/synth.metadata index 8dd8248e..d2232722 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a1ab4d44db02d59ff58810c6d4182d84e4b9abaa", - "internalRef": "365620142" + "sha": "8586c62be9ba602c65d727485c828ed53ce11960", + "internalRef": "365836845" } }, { From b03a0256ca83329462783280376c436e8906745f Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:34:06 -0700 Subject: [PATCH 05/13] chore: Add `grpc_service_config` to googleads C# GAPICs Committer: @aohren PiperOrigin-RevId: 365844681 Source-Author: Google APIs Source-Date: Tue Mar 30 10:39:42 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: d6642a0d79ab1500f342e6d2a65b3d44a97841bc Source-Link: https://github.com/googleapis/googleapis/commit/d6642a0d79ab1500f342e6d2a65b3d44a97841bc --- google/cloud/datacatalog_v1/__init__.py | 4 
++-- synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 28e18503..7f2e8be3 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -147,7 +147,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerSerializationClient", + "PolicyTagManagerClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", ) diff --git a/synth.metadata b/synth.metadata index d2232722..6dc45d5f 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8586c62be9ba602c65d727485c828ed53ce11960", - "internalRef": "365836845" + "sha": "d6642a0d79ab1500f342e6d2a65b3d44a97841bc", + "internalRef": "365844681" } }, { From 87b7e8573263274345bb6451659249aef752ffe2 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:35:54 -0700 Subject: [PATCH 06/13] chore: upgrade gapic-generator-python to 0.43.2 PiperOrigin-RevId: 366826395 Source-Author: Google APIs Source-Date: Mon Apr 5 10:39:44 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 2490be32f08cccc870862ea86920f58325795c89 Source-Link: https://github.com/googleapis/googleapis/commit/2490be32f08cccc870862ea86920f58325795c89 --- .../services/data_catalog/async_client.py | 6 +-- .../services/data_catalog/client.py | 6 +-- .../services/data_catalog/transports/base.py | 6 +-- .../services/data_catalog/transports/grpc.py | 10 ++-- .../data_catalog/transports/grpc_asyncio.py | 10 ++-- .../policy_tag_manager/async_client.py | 6 +-- .../services/policy_tag_manager/client.py | 6 +-- .../policy_tag_manager/transports/base.py | 6 +-- 
.../policy_tag_manager/transports/grpc.py | 10 ++-- .../transports/grpc_asyncio.py | 10 ++-- google/cloud/datacatalog_v1beta1/__init__.py | 4 +- .../services/data_catalog/async_client.py | 6 +-- .../services/data_catalog/client.py | 6 +-- .../services/data_catalog/transports/base.py | 6 +-- .../services/data_catalog/transports/grpc.py | 10 ++-- .../data_catalog/transports/grpc_asyncio.py | 10 ++-- .../policy_tag_manager/async_client.py | 6 +-- .../services/policy_tag_manager/client.py | 6 +-- .../policy_tag_manager/transports/base.py | 6 +-- .../policy_tag_manager/transports/grpc.py | 10 ++-- .../transports/grpc_asyncio.py | 10 ++-- scripts/fixup_datacatalog_v1_keywords.py | 4 +- scripts/fixup_datacatalog_v1beta1_keywords.py | 4 +- synth.metadata | 4 +- .../gapic/datacatalog_v1/test_data_catalog.py | 48 +++++++++---------- .../datacatalog_v1/test_policy_tag_manager.py | 36 +++++++------- .../datacatalog_v1beta1/test_data_catalog.py | 48 +++++++++---------- .../test_policy_tag_manager.py | 36 +++++++------- 28 files changed, 168 insertions(+), 168 deletions(-) diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 9eb89baf..9b90641e 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -39,7 +39,7 @@ from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -2587,7 +2587,7 @@ async def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: 
Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2731,7 +2731,7 @@ async def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. An empty policy is returned if the resource exists but does not have a diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py index 138398ab..8ab9efa2 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -43,7 +43,7 @@ from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -2831,7 +2831,7 @@ def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2974,7 +2974,7 @@ def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. 
An empty policy is returned if the resource exists but does not have a diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 0dab8ad3..2735e646 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -28,7 +28,7 @@ from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore @@ -521,7 +521,7 @@ def set_iam_policy( self, ) -> typing.Callable[ [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[gi_policy.Policy, typing.Awaitable[gi_policy.Policy]], ]: raise NotImplementedError() @@ -530,7 +530,7 @@ def get_iam_policy( self, ) -> typing.Callable[ [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[gi_policy.Policy, typing.Awaitable[gi_policy.Policy]], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index 7206d1e1..92b453f0 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -29,7 +29,7 @@ from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from 
google.protobuf import empty_pb2 as empty # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -985,7 +985,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], gi_policy.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. Replaces any @@ -1020,14 +1020,14 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], gi_policy.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. 
A ``NOT_FOUND`` @@ -1066,7 +1066,7 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 4b61e241..62f97d12 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -30,7 +30,7 @@ from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -1017,7 +1017,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[gi_policy.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. 
Replaces any @@ -1052,14 +1052,14 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[gi_policy.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. A ``NOT_FOUND`` @@ -1098,7 +1098,7 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py index 0fb150d4..bd573751 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -32,7 +32,7 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport @@ -1028,7 +1028,7 @@ async def get_iam_policy( retry: retries.Retry = 
gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Gets the IAM policy for a policy tag or a taxonomy. Args: @@ -1135,7 +1135,7 @@ async def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Sets the IAM policy for a policy tag or a taxonomy. Args: diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index 7e4f27e5..eb3a0c7e 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -36,7 +36,7 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerGrpcTransport @@ -1231,7 +1231,7 @@ def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Gets the IAM policy for a policy tag or a taxonomy. Args: @@ -1337,7 +1337,7 @@ def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> gi_policy.Policy: r"""Sets the IAM policy for a policy tag or a taxonomy. 
Args: diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py index aacfe62b..b9a4ea22 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py @@ -27,7 +27,7 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore @@ -265,7 +265,7 @@ def get_iam_policy( self, ) -> typing.Callable[ [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[gi_policy.Policy, typing.Awaitable[gi_policy.Policy]], ]: raise NotImplementedError() @@ -274,7 +274,7 @@ def set_iam_policy( self, ) -> typing.Callable[ [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[gi_policy.Policy, typing.Awaitable[gi_policy.Policy]], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py index a20ebcc6..7d557c22 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -28,7 +28,7 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import PolicyTagManagerTransport, 
DEFAULT_CLIENT_INFO @@ -515,7 +515,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], gi_policy.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a policy tag or a taxonomy. @@ -534,14 +534,14 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], gi_policy.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a policy tag or a taxonomy. 
@@ -560,7 +560,7 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py index 13f68af2..fefe529a 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -29,7 +29,7 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO @@ -527,7 +527,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[gi_policy.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a policy tag or a taxonomy. 
@@ -546,14 +546,14 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[gi_policy.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a policy tag or a taxonomy. @@ -572,7 +572,7 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=gi_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 8bc01583..16534418 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,7 +103,6 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -140,6 +139,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", + "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "DataCatalogClient", ) diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index 
f334609f..55b36bd0 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -38,7 +38,7 @@ from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -2499,7 +2499,7 @@ async def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2643,7 +2643,7 @@ async def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. 
An empty policy is returned if the resource exists but does not have a diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 28d471aa..907a4e99 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -42,7 +42,7 @@ from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -2679,7 +2679,7 @@ def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2822,7 +2822,7 @@ def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. 
An empty policy is returned if the resource exists but does not have a diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py index 40f9af3e..6d0fd33f 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -28,7 +28,7 @@ from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore @@ -543,7 +543,7 @@ def set_iam_policy( self, ) -> typing.Callable[ [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[giv_policy.Policy, typing.Awaitable[giv_policy.Policy]], ]: raise NotImplementedError() @@ -552,7 +552,7 @@ def get_iam_policy( self, ) -> typing.Callable[ [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[giv_policy.Policy, typing.Awaitable[giv_policy.Policy]], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py index 7fdff619..2178e29f 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -29,7 +29,7 @@ from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from 
google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -942,7 +942,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], giv_policy.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. Replaces any @@ -977,14 +977,14 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], giv_policy.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. 
A ``NOT_FOUND`` @@ -1023,7 +1023,7 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py index bdf72f4d..22a44db0 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -30,7 +30,7 @@ from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -973,7 +973,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[giv_policy.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. 
Replaces any @@ -1008,14 +1008,14 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[giv_policy.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. A ``NOT_FOUND`` @@ -1054,7 +1054,7 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index 7f0cbecc..57a5c560 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -31,7 +31,7 @@ from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport @@ -975,7 +975,7 @@ 
async def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Gets the IAM policy for a taxonomy or a policy tag. Args: @@ -1082,7 +1082,7 @@ async def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Sets the IAM policy for a taxonomy or a policy tag. Args: diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 152d0a10..43da5344 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -35,7 +35,7 @@ from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerGrpcTransport @@ -1178,7 +1178,7 @@ def get_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Gets the IAM policy for a taxonomy or a policy tag. Args: @@ -1284,7 +1284,7 @@ def set_iam_policy( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> giv_policy.Policy: r"""Sets the IAM policy for a taxonomy or a policy tag. 
Args: diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py index 1a3c9921..6d117352 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py @@ -27,7 +27,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore @@ -265,7 +265,7 @@ def get_iam_policy( self, ) -> typing.Callable[ [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[giv_policy.Policy, typing.Awaitable[giv_policy.Policy]], ]: raise NotImplementedError() @@ -274,7 +274,7 @@ def set_iam_policy( self, ) -> typing.Callable[ [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + typing.Union[giv_policy.Policy, typing.Awaitable[giv_policy.Policy]], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py index 2bb4e178..84841b83 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -28,7 +28,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore 
from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO @@ -502,7 +502,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], giv_policy.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a taxonomy or a policy tag. @@ -521,14 +521,14 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], giv_policy.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a taxonomy or a policy tag. 
@@ -547,7 +547,7 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py index 9c2dfa26..140d5bfe 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -29,7 +29,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO @@ -514,7 +514,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[giv_policy.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a taxonomy or a policy tag. 
@@ -533,14 +533,14 @@ def get_iam_policy( self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy", request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[giv_policy.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a taxonomy or a policy tag. @@ -559,7 +559,7 @@ def set_iam_policy( self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy", request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + response_deserializer=giv_policy.Policy.FromString, ) return self._stubs["set_iam_policy"] diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py index d4ee3ca3..ccf16558 100644 --- a/scripts/fixup_datacatalog_v1_keywords.py +++ b/scripts/fixup_datacatalog_v1_keywords.py @@ -58,7 +58,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), 'get_entry': ('name', ), 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': ('resource', 'options', ), + 'get_iam_policy': ('resource', 'options_', ), 'get_policy_tag': ('name', ), 'get_tag_template': ('name', ), 'get_taxonomy': ('name', ), @@ -72,7 +72,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', 
), - 'set_iam_policy': ('resource', 'policy', ), + 'set_iam_policy': ('resource', 'policy_', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_entry': ('entry', 'update_mask', ), 'update_entry_group': ('entry_group', 'update_mask', ), diff --git a/scripts/fixup_datacatalog_v1beta1_keywords.py b/scripts/fixup_datacatalog_v1beta1_keywords.py index bf43018a..5304bf50 100644 --- a/scripts/fixup_datacatalog_v1beta1_keywords.py +++ b/scripts/fixup_datacatalog_v1beta1_keywords.py @@ -58,7 +58,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), 'get_entry': ('name', ), 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': ('resource', 'options', ), + 'get_iam_policy': ('resource', 'options_', ), 'get_policy_tag': ('name', ), 'get_tag_template': ('name', ), 'get_taxonomy': ('name', ), @@ -71,7 +71,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'lookup_entry': ('linked_resource', 'sql_resource', ), 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 'set_iam_policy': ('resource', 'policy', ), + 'set_iam_policy': ('resource', 'policy_', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_entry': ('entry', 'update_mask', ), 'update_entry_group': ('entry_group', 'update_mask', ), diff --git a/synth.metadata b/synth.metadata index 6dc45d5f..cb393bce 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d6642a0d79ab1500f342e6d2a65b3d44a97841bc", - "internalRef": "365844681" + "sha": "2490be32f08cccc870862ea86920f58325795c89", + "internalRef": "366826395" } }, { diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index f4966de4..6eeca453 100644 --- 
a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -46,8 +46,8 @@ from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import options_pb2 as gi_options # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -6714,7 +6714,7 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = gi_policy.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -6726,7 +6726,7 @@ def test_set_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -6769,7 +6769,7 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + gi_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -6781,7 +6781,7 @@ async def test_set_iam_policy_async( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -6803,7 +6803,7 @@ def test_set_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() client.set_iam_policy(request) @@ -6828,7 +6828,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) await client.set_iam_policy(request) @@ -6847,12 +6847,12 @@ def test_set_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy_": gi_policy.Policy(version=774), } ) call.assert_called() @@ -6864,7 +6864,7 @@ def test_set_iam_policy_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -6896,9 +6896,9 @@ async def test_set_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.set_iam_policy(resource="resource_value",) @@ -6937,7 +6937,7 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = gi_policy.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -6949,7 +6949,7 @@ def test_get_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -6992,7 +6992,7 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + gi_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -7004,7 +7004,7 @@ async def test_get_iam_policy_async( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -7026,7 +7026,7 @@ def test_get_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() client.get_iam_policy(request) @@ -7051,7 +7051,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) await client.get_iam_policy(request) @@ -7070,12 +7070,12 @@ def test_get_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options_": gi_options.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -7087,7 +7087,7 @@ def test_get_iam_policy_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
@@ -7119,9 +7119,9 @@ async def test_get_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_iam_policy(resource="resource_value",) diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index 66033d53..031279ac 100644 --- a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -43,8 +43,8 @@ from google.cloud.datacatalog_v1.types import policytagmanager from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import options_pb2 as gi_options # type: ignore +from google.iam.v1 import policy_pb2 as gi_policy # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -3181,7 +3181,7 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = gi_policy.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -3193,7 +3193,7 @@ def test_get_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -3236,7 +3236,7 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + gi_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -3248,7 +3248,7 @@ async def test_get_iam_policy_async( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -3270,7 +3270,7 @@ def test_get_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() client.get_iam_policy(request) @@ -3297,7 +3297,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) await client.get_iam_policy(request) @@ -3316,12 +3316,12 @@ def test_get_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options_": gi_options.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -3341,7 +3341,7 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = gi_policy.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -3353,7 +3353,7 @@ def test_set_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -3396,7 +3396,7 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + gi_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -3408,7 +3408,7 @@ async def test_set_iam_policy_async( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, gi_policy.Policy) assert response.version == 774 @@ -3430,7 +3430,7 @@ def test_set_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() client.set_iam_policy(request) @@ -3457,7 +3457,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gi_policy.Policy()) await client.set_iam_policy(request) @@ -3476,12 +3476,12 @@ def test_set_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = gi_policy.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy_": gi_policy.Policy(version=774), } ) call.assert_called() diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 1d7aeb41..2ab037f7 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -47,8 +47,8 @@ from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import options_pb2 as giv_options # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 as 
field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -6385,7 +6385,7 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = giv_policy.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -6397,7 +6397,7 @@ def test_set_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -6440,7 +6440,7 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + giv_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -6452,7 +6452,7 @@ async def test_set_iam_policy_async( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -6474,7 +6474,7 @@ def test_set_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() client.set_iam_policy(request) @@ -6499,7 +6499,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) await client.set_iam_policy(request) @@ -6518,12 +6518,12 @@ def test_set_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy_": giv_policy.Policy(version=774), } ) call.assert_called() @@ -6535,7 +6535,7 @@ def test_set_iam_policy_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -6567,9 +6567,9 @@ async def test_set_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.set_iam_policy(resource="resource_value",) @@ -6608,7 +6608,7 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = giv_policy.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -6620,7 +6620,7 @@ def test_get_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -6663,7 +6663,7 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + giv_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -6675,7 +6675,7 @@ async def test_get_iam_policy_async( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -6697,7 +6697,7 @@ def test_get_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() client.get_iam_policy(request) @@ -6722,7 +6722,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) await client.get_iam_policy(request) @@ -6741,12 +6741,12 @@ def test_get_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options_": giv_options.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -6758,7 +6758,7 @@ def test_get_iam_policy_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -6790,9 +6790,9 @@ async def test_get_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_iam_policy(resource="resource_value",) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 4b073a35..0282201e 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -42,8 +42,8 @@ from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import transports from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import options_pb2 as giv_options # type: ignore +from google.iam.v1 import policy_pb2 as giv_policy # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.type import expr_pb2 as expr # type: ignore @@ -3161,7 +3161,7 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = giv_policy.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -3173,7 +3173,7 @@ def test_get_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -3216,7 +3216,7 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + giv_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -3228,7 +3228,7 @@ async def test_get_iam_policy_async( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -3250,7 +3250,7 @@ def test_get_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() client.get_iam_policy(request) @@ -3277,7 +3277,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) await client.get_iam_policy(request) @@ -3296,12 +3296,12 @@ def test_get_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options_": giv_options.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -3321,7 +3321,7 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = giv_policy.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -3333,7 +3333,7 @@ def test_set_iam_policy( # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -3376,7 +3376,7 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + giv_policy.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -3388,7 +3388,7 @@ async def test_set_iam_policy_async( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, giv_policy.Policy) assert response.version == 774 @@ -3410,7 +3410,7 @@ def test_set_iam_policy_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() client.set_iam_policy(request) @@ -3437,7 +3437,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(giv_policy.Policy()) await client.set_iam_policy(request) @@ -3456,12 +3456,12 @@ def test_set_iam_policy_from_dict_foreign(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = giv_policy.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy_": giv_policy.Policy(version=774), } ) call.assert_called() From 49bccdee8b1efb8b6ab98fff7373e37d48c44354 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:35:55 -0700 Subject: [PATCH 07/13] chore(bazel): update rules_gapic to v0.5.3 PiperOrigin-RevId: 366838867 Source-Author: Google APIs Source-Date: Mon Apr 5 11:33:06 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: d78dc2e0cb627d3e48e910abf4b991264affcb56 Source-Link: https://github.com/googleapis/googleapis/commit/d78dc2e0cb627d3e48e910abf4b991264affcb56 --- google/cloud/datacatalog_v1/__init__.py | 4 ++-- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 7f2e8be3..296877f9 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -108,7 +108,6 @@ "CreateTagTemplateRequest", "CreateTaxonomyRequest", "CrossRegionalSource", - "DataCatalogClient", "DataSource", "DatabaseTableSpec", "DeleteEntryGroupRequest", @@ -148,6 +147,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + 
"PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "DataCatalogClient", ) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 16534418..8bc01583 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,6 +103,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", + "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -139,7 +140,6 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerClient", ) diff --git a/synth.metadata b/synth.metadata index cb393bce..97205f4a 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2490be32f08cccc870862ea86920f58325795c89", - "internalRef": "366826395" + "sha": "d78dc2e0cb627d3e48e910abf4b991264affcb56", + "internalRef": "366838867" } }, { From 562f6972dbe3dbd2318cfb1c1941de315501775d Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:38:33 -0700 Subject: [PATCH 08/13] fix!: remove empty appengine/v1beta/deployed_files.proto PiperOrigin-RevId: 366869955 Source-Author: Google APIs Source-Date: Mon Apr 5 14:05:01 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: fa7915f8d43926de5effb815129a274579fa84df Source-Link: https://github.com/googleapis/googleapis/commit/fa7915f8d43926de5effb815129a274579fa84df --- google/cloud/datacatalog_v1/__init__.py | 4 
++-- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 296877f9..7f2e8be3 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -108,6 +108,7 @@ "CreateTagTemplateRequest", "CreateTaxonomyRequest", "CrossRegionalSource", + "DataCatalogClient", "DataSource", "DatabaseTableSpec", "DeleteEntryGroupRequest", @@ -147,7 +148,6 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", - "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerSerializationClient", ) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 8bc01583..16534418 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,7 +103,6 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -140,6 +139,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", + "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "DataCatalogClient", ) diff --git a/synth.metadata b/synth.metadata index 97205f4a..30ac5ce5 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d78dc2e0cb627d3e48e910abf4b991264affcb56", - "internalRef": "366838867" + "sha": 
"fa7915f8d43926de5effb815129a274579fa84df", + "internalRef": "366869955" } }, { From c580ccfd68f4a7d681fc5384ad2014f3753b771b Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:40:47 -0700 Subject: [PATCH 09/13] feat: Add fields for Pub/Sub triggers Committer: @gleeper PiperOrigin-RevId: 368533270 Source-Author: Google APIs Source-Date: Wed Apr 14 16:39:41 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 9a9e29627a715077cd8e33c0d2bd549437039bd0 Source-Link: https://github.com/googleapis/googleapis/commit/9a9e29627a715077cd8e33c0d2bd549437039bd0 --- google/cloud/datacatalog_v1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 7f2e8be3..296877f9 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -108,7 +108,6 @@ "CreateTagTemplateRequest", "CreateTaxonomyRequest", "CrossRegionalSource", - "DataCatalogClient", "DataSource", "DatabaseTableSpec", "DeleteEntryGroupRequest", @@ -148,6 +147,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "DataCatalogClient", ) diff --git a/synth.metadata b/synth.metadata index 30ac5ce5..03ba0a78 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fa7915f8d43926de5effb815129a274579fa84df", - "internalRef": "366869955" + "sha": "9a9e29627a715077cd8e33c0d2bd549437039bd0", + "internalRef": "368533270" } }, { From 5df3732828d67bbae4fd56485f8bb55e12332069 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:41:16 
-0700 Subject: [PATCH 10/13] docs: reformat comments in PolicyTagManager definition PiperOrigin-RevId: 368687296 Source-Author: Google APIs Source-Date: Thu Apr 15 11:56:51 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 91eee3d039fbdbadee008393504900287bbc6f43 Source-Link: https://github.com/googleapis/googleapis/commit/91eee3d039fbdbadee008393504900287bbc6f43 --- .../proto/policytagmanager.proto | 22 ++- .../policy_tag_manager/async_client.py | 150 ++++++++---------- .../services/policy_tag_manager/client.py | 150 ++++++++---------- .../policy_tag_manager/transports/grpc.py | 12 +- .../transports/grpc_asyncio.py | 12 +- .../datacatalog_v1/types/policytagmanager.py | 58 ++++--- synth.metadata | 4 +- 7 files changed, 196 insertions(+), 212 deletions(-) diff --git a/google/cloud/datacatalog_v1/proto/policytagmanager.proto b/google/cloud/datacatalog_v1/proto/policytagmanager.proto index cc52f7d1..0412acc9 100644 --- a/google/cloud/datacatalog_v1/proto/policytagmanager.proto +++ b/google/cloud/datacatalog_v1/proto/policytagmanager.proto @@ -101,11 +101,12 @@ service PolicyTagManager { option (google.api.method_signature) = "parent,policy_tag"; } - // Deletes a policy tag. This method also deletes - // - all of its descendant policy tags, if any, - // - the policies associated with the policy tag and its descendants, and - // - references from BigQuery table schema of the policy tag and its - // descendants. + // Deletes a policy tag. This method also deletes: + // + // * all of its descendant policy tags, if any + // * the policies associated with the policy tag and its descendants + // * references from BigQuery table schema of the policy tag and its + // descendants. 
rpc DeletePolicyTag(DeletePolicyTagRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/taxonomies/*/policyTags/*}" @@ -180,6 +181,8 @@ service PolicyTagManager { // A taxonomy is a collection of hierarchical policy tags that classify data // along a common axis. For instance a "data sensitivity" taxonomy could contain // the following policy tags: +// +// ``` // + PII // + Account number // + Age @@ -187,11 +190,16 @@ service PolicyTagManager { // + Zipcode // + Financials // + Revenue +// ``` +// // A "data origin" taxonomy could contain the following policy tags: +// +// ``` // + User data // + Employee data // + Partner data // + Public data +// ``` message Taxonomy { option (google.api.resource) = { type: "datacatalog.googleapis.com/Taxonomy" @@ -236,10 +244,14 @@ message Taxonomy { // Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined // in a hierarchy. For example, consider the following hierarchy: +// +// ``` // + Geolocation // + LatLong // + City // + ZipCode +// ``` +// // Policy tag "Geolocation" contains 3 child policy tags: "LatLong", "City", and // "ZipCode". message PolicyTag { diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py index bd573751..322fcb16 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -220,23 +220,17 @@ async def create_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. 
For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -381,23 +375,17 @@ async def update_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -554,23 +542,17 @@ async def get_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. 
For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -649,16 +631,14 @@ async def create_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -710,11 +690,13 @@ async def delete_policy_tag( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a policy tag. This method also deletes - - all of its descendant policy tags, if any, - - the policies associated with the policy tag and its - descendants, and - references from BigQuery table - schema of the policy tag and its descendants. + r"""Deletes a policy tag. 
This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. Args: request (:class:`google.cloud.datacatalog_v1.types.DeletePolicyTagRequest`): @@ -806,16 +788,14 @@ async def update_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -971,16 +951,14 @@ async def get_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. 
diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index eb3a0c7e..30158da9 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -413,23 +413,17 @@ def create_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -576,23 +570,17 @@ def update_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. 
For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -751,23 +739,17 @@ def get_taxonomy( Returns: google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of - hierarchical policy tags that classify - data along a common axis. For instance a - "data sensitivity" taxonomy could - contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain - the following policy tags: + User data - + Employee data - + Partner data - + Public data + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` """ # Create or coerce a protobuf request object. @@ -847,16 +829,14 @@ def create_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. 
For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -909,11 +889,13 @@ def delete_policy_tag( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a policy tag. This method also deletes - - all of its descendant policy tags, if any, - - the policies associated with the policy tag and its - descendants, and - references from BigQuery table - schema of the policy tag and its descendants. + r"""Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. Args: request (google.cloud.datacatalog_v1.types.DeletePolicyTagRequest): @@ -1006,16 +988,14 @@ def update_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1173,16 +1153,14 @@ def get_policy_tag( Returns: google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy tags can be defined - in a hierarchy. 
For example, consider - the following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 - child policy tags: "LatLong", "City", - and "ZipCode". + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py index 7d557c22..d19e22a5 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -403,11 +403,13 @@ def delete_policy_tag( ) -> Callable[[policytagmanager.DeletePolicyTagRequest], empty.Empty]: r"""Return a callable for the delete policy tag method over gRPC. - Deletes a policy tag. This method also deletes - - all of its descendant policy tags, if any, - - the policies associated with the policy tag and its - descendants, and - references from BigQuery table - schema of the policy tag and its descendants. + Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. 
Returns: Callable[[~.DeletePolicyTagRequest], diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py index fefe529a..3c637375 100644 --- a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -413,11 +413,13 @@ def delete_policy_tag( ) -> Callable[[policytagmanager.DeletePolicyTagRequest], Awaitable[empty.Empty]]: r"""Return a callable for the delete policy tag method over gRPC. - Deletes a policy tag. This method also deletes - - all of its descendant policy tags, if any, - - the policies associated with the policy tag and its - descendants, and - references from BigQuery table - schema of the policy tag and its descendants. + Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. Returns: Callable[[~.DeletePolicyTagRequest], diff --git a/google/cloud/datacatalog_v1/types/policytagmanager.py b/google/cloud/datacatalog_v1/types/policytagmanager.py index 56ad4d87..ebc99f4f 100644 --- a/google/cloud/datacatalog_v1/types/policytagmanager.py +++ b/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -44,21 +44,28 @@ class Taxonomy(proto.Message): - r"""A taxonomy is a collection of hierarchical policy tags that - classify data along a common axis. 
For instance a "data - sensitivity" taxonomy could contain the following policy tags: - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - A "data origin" taxonomy could contain the following policy - tags: + User data - + Employee data - + Partner data - + Public data + r"""A taxonomy is a collection of hierarchical policy tags that classify + data along a common axis. For instance a "data sensitivity" taxonomy + could contain the following policy tags: + + :: + + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + + A "data origin" taxonomy could contain the following policy tags: + + :: + + + User data + + Employee data + + Partner data + + Public data Attributes: name (str): @@ -112,14 +119,19 @@ class PolicyType(proto.Enum): class PolicyTag(proto.Message): - r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags - can be defined in a hierarchy. For example, consider the - following hierarchy: + Geolocation - + LatLong - + City - + ZipCode - Policy tag "Geolocation" contains 3 child policy tags: - "LatLong", "City", and "ZipCode". + r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be + defined in a hierarchy. For example, consider the following + hierarchy: + + :: + + + Geolocation + + LatLong + + City + + ZipCode + + Policy tag "Geolocation" contains 3 child policy tags: "LatLong", + "City", and "ZipCode". 
Attributes: name (str): diff --git a/synth.metadata b/synth.metadata index 03ba0a78..1ee667a4 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9a9e29627a715077cd8e33c0d2bd549437039bd0", - "internalRef": "368533270" + "sha": "91eee3d039fbdbadee008393504900287bbc6f43", + "internalRef": "368687296" } }, { From 9649a323e4608a47ef104e04051296fc7c9513df Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:42:24 -0700 Subject: [PATCH 11/13] chore: release gapic-generator-php v0.1.0 Committer: @miraleung PiperOrigin-RevId: 368733958 Source-Author: Google APIs Source-Date: Thu Apr 15 15:50:23 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: bd72b4133563262bdb3abbb8e2512a470d4a10f9 Source-Link: https://github.com/googleapis/googleapis/commit/bd72b4133563262bdb3abbb8e2512a470d4a10f9 --- google/cloud/datacatalog_v1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 296877f9..7f2e8be3 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -108,6 +108,7 @@ "CreateTagTemplateRequest", "CreateTaxonomyRequest", "CrossRegionalSource", + "DataCatalogClient", "DataSource", "DatabaseTableSpec", "DeleteEntryGroupRequest", @@ -147,7 +148,6 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", - "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerSerializationClient", ) diff --git a/synth.metadata b/synth.metadata index 1ee667a4..eb891362 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": 
"https://github.com/googleapis/googleapis.git", - "sha": "91eee3d039fbdbadee008393504900287bbc6f43", - "internalRef": "368687296" + "sha": "bd72b4133563262bdb3abbb8e2512a470d4a10f9", + "internalRef": "368733958" } }, { From a9a74d48dba583820f1f0d6827bbb75b1c521ddf Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 16 Apr 2021 02:42:52 -0700 Subject: [PATCH 12/13] feat!: Refactored Elliptic Key curves to provide additional options, the NamedCurve enum is replaced by the EcKeyType message. PiperOrigin-RevId: 368757092 Source-Author: Google APIs Source-Date: Thu Apr 15 18:21:21 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: b1c23ed98caaae6b5d0cf75649afc803bda141f3 Source-Link: https://github.com/googleapis/googleapis/commit/b1c23ed98caaae6b5d0cf75649afc803bda141f3 --- google/cloud/datacatalog_v1/__init__.py | 4 ++-- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 7f2e8be3..296877f9 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -108,7 +108,6 @@ "CreateTagTemplateRequest", "CreateTaxonomyRequest", "CrossRegionalSource", - "DataCatalogClient", "DataSource", "DatabaseTableSpec", "DeleteEntryGroupRequest", @@ -148,6 +147,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", @@ -173,5 +173,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "DataCatalogClient", ) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 16534418..8bc01583 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,6 +103,7 @@ 
"CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", + "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -139,7 +140,6 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerClient", "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerClient", ) diff --git a/synth.metadata b/synth.metadata index eb891362..e5543c95 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "bd72b4133563262bdb3abbb8e2512a470d4a10f9", - "internalRef": "368733958" + "sha": "b1c23ed98caaae6b5d0cf75649afc803bda141f3", + "internalRef": "368757092" } }, { From 665e14cf53f376e0d6706a813a54890aae995335 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 20 Apr 2021 19:34:01 +0000 Subject: [PATCH 13/13] revert policy rename --- scripts/fixup_datacatalog_v1_keywords.py | 2 +- scripts/fixup_datacatalog_v1beta1_keywords.py | 2 +- synth.py | 33 +++++++++++-------- .../gapic/datacatalog_v1/test_data_catalog.py | 2 +- .../datacatalog_v1/test_policy_tag_manager.py | 2 +- .../datacatalog_v1beta1/test_data_catalog.py | 2 +- .../test_policy_tag_manager.py | 2 +- 7 files changed, 26 insertions(+), 19 deletions(-) diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py index ccf16558..821d202f 100644 --- a/scripts/fixup_datacatalog_v1_keywords.py +++ b/scripts/fixup_datacatalog_v1_keywords.py @@ -72,7 +72,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 
'set_iam_policy': ('resource', 'policy_', ), + 'set_iam_policy': ('resource', 'policy', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_entry': ('entry', 'update_mask', ), 'update_entry_group': ('entry_group', 'update_mask', ), diff --git a/scripts/fixup_datacatalog_v1beta1_keywords.py b/scripts/fixup_datacatalog_v1beta1_keywords.py index 5304bf50..b5ef9099 100644 --- a/scripts/fixup_datacatalog_v1beta1_keywords.py +++ b/scripts/fixup_datacatalog_v1beta1_keywords.py @@ -71,7 +71,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'lookup_entry': ('linked_resource', 'sql_resource', ), 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 'set_iam_policy': ('resource', 'policy_', ), + 'set_iam_policy': ('resource', 'policy', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_entry': ('entry', 'update_mask', ), 'update_entry_group': ('entry_group', 'update_mask', ), diff --git a/synth.py b/synth.py index ed4e1f6b..50bde339 100644 --- a/synth.py +++ b/synth.py @@ -25,10 +25,10 @@ # ---------------------------------------------------------------------------- # Generate datacatalog GAPIC layer # ---------------------------------------------------------------------------- -versions = ['v1', 'v1beta1'] +versions = ["v1", "v1beta1"] for version in versions: library = gapic.py_library( - service='datacatalog', + service="datacatalog", version=version, bazel_target=f"//google/cloud/datacatalog/{version}:datacatalog-{version}-py", include_protos=True, @@ -37,23 +37,30 @@ s.move( library, excludes=[ - 'docs/conf.py', - 'docs/index.rst', - 'README.rst', - 'nox*.py', - 'setup.py', - 'setup.cfg', + "docs/conf.py", + "docs/index.rst", + "README.rst", + "nox*.py", + "setup.py", + "setup.cfg", ], ) +# Rename `policy_` to `policy` to avoid breaking change in a GA library +# Only replace if a non-alphanumeric (\W) character follows `policy_` 
+s.replace( + ["google/**/*.py", "scripts/fixup*.py", "tests/unit/**/*.py"], + "policy_(\W)", + "policy\g<1>", +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library( - samples=True, - microgenerator=True, -) -s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file +templated_files = common.py_library(samples=True, microgenerator=True,) +s.move( + templated_files, excludes=[".coveragerc"] +) # microgenerator has a good .coveragerc file # ---------------------------------------------------------------------------- # Samples templates diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 6eeca453..5d5fb1f2 100644 --- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -6852,7 +6852,7 @@ def test_set_iam_policy_from_dict_foreign(): response = client.set_iam_policy( request={ "resource": "resource_value", - "policy_": gi_policy.Policy(version=774), + "policy": gi_policy.Policy(version=774), } ) call.assert_called() diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index 031279ac..f77cff38 100644 --- a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -3481,7 +3481,7 @@ def test_set_iam_policy_from_dict_foreign(): response = client.set_iam_policy( request={ "resource": "resource_value", - "policy_": gi_policy.Policy(version=774), + "policy": gi_policy.Policy(version=774), } ) call.assert_called() diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 2ab037f7..1e9036b4 100644 --- 
a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -6523,7 +6523,7 @@ def test_set_iam_policy_from_dict_foreign(): response = client.set_iam_policy( request={ "resource": "resource_value", - "policy_": giv_policy.Policy(version=774), + "policy": giv_policy.Policy(version=774), } ) call.assert_called() diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 0282201e..64df8e1b 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -3461,7 +3461,7 @@ def test_set_iam_policy_from_dict_foreign(): response = client.set_iam_policy( request={ "resource": "resource_value", - "policy_": giv_policy.Policy(version=774), + "policy": giv_policy.Policy(version=774), } ) call.assert_called()