
Commit ecf7b31

fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#539)
* fix(deps): Require google-api-core >=1.34.0, >=2.11.0
  fix: Drop usage of pkg_resources
  fix: Fix timeout default values
  docs(samples): Snippetgen should call await on the operation coroutine before calling result
  PiperOrigin-RevId: 493260409
  Source-Link: googleapis/googleapis@fea4387
  Source-Link: googleapis/googleapis-gen@387b734
  Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9

* 🦉 Updates from OwlBot post-processor
  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fix(deps): require google-api-core>=1.34.0,>=2.11.0

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <partheniou@google.com>
1 parent 4654ccc commit ecf7b31
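A note on the version constraint in the title: ">=1.34.0, >=2.11.0" means "at least 1.34.0 on the 1.x line, and at least 2.11.0 when a 2.x release is installed". pip has no direct syntax for a per-major-line floor, so generated setup.py files usually encode it as a single floor plus exclusions of 2.0 through 2.10. The exact specifier is not part of the diff below, so the following is an illustrative sketch, not a quote from the commit:

# setup.py (sketch) -- ">=1.34.0, >=2.11.0" spelled as one floor plus
# exclusions, so any accepted 2.x release is necessarily >= 2.11.0.
install_requires = [
    "google-api-core[grpc] >= 1.34.0, <3.0.0dev, !=2.0.*, !=2.1.*, !=2.2.*, "
    "!=2.3.*, !=2.4.*, !=2.5.*, !=2.6.*, !=2.7.*, !=2.8.*, !=2.9.*, !=2.10.*",
]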

File tree: 15 files changed (+98, -151 lines)


packages/google-cloud-bigquery-storage/.coveragerc

Lines changed: 0 additions & 5 deletions
@@ -10,8 +10,3 @@ exclude_lines =
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore pkg_resources exceptions.
-    # This is added at the module level as a safeguard for if someone
-    # generates the code and tries to run it without pip installing. This
-    # makes it virtually impossible to test properly.
-    except pkg_resources.DistributionNotFound

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py

Lines changed: 8 additions & 12 deletions
@@ -29,7 +29,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -227,7 +228,7 @@ async def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -389,7 +390,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]:
         r"""Reads rows from the stream in the format prescribed
@@ -518,7 +519,7 @@ async def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -620,14 +621,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()


-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 __all__ = ("BigQueryReadAsyncClient",)

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/client.py

Lines changed: 8 additions & 12 deletions
@@ -29,7 +29,8 @@
     Union,
     cast,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 from google.api_core import client_options as client_options_lib
 from google.api_core import exceptions as core_exceptions
@@ -497,7 +498,7 @@ def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -649,7 +650,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Iterable[storage.ReadRowsResponse]:
         r"""Reads rows from the stream in the format prescribed
@@ -769,7 +770,7 @@ def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -869,14 +870,9 @@ def __exit__(self, type, value, traceback):
         self.transport.close()


-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 __all__ = ("BigQueryReadClient",)

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py

Lines changed: 5 additions & 9 deletions
@@ -15,7 +15,8 @@
 #
 import abc
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 import google.auth  # type: ignore
 import google.api_core
@@ -28,14 +29,9 @@
 from google.cloud.bigquery_storage_v1.types import storage
 from google.cloud.bigquery_storage_v1.types import stream

-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 class BigQueryReadTransport(abc.ABC):
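DEFAULT_CLIENT_INFO exists in each transport module because it becomes the user-agent/x-goog-api-client metadata attached to every RPC. A quick sketch using the real to_user_agent() helper (the version string is hypothetical, and the exact output fields depend on the installed stack):

from google.api_core import gapic_v1

info = gapic_v1.client_info.ClientInfo(gapic_version="2.17.0")  # hypothetical version
print(info.to_user_agent())
# Prints something like "gl-python/3.10.8 grpc/1.51.1 gax/2.11.0 gapic/2.17.0".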

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py

Lines changed: 11 additions & 15 deletions
@@ -30,7 +30,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -231,7 +232,7 @@ async def create_write_stream(
         parent: Optional[str] = None,
         write_stream: Optional[stream.WriteStream] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Creates a write stream to the given table. Additionally, every
@@ -358,7 +359,7 @@ def append_rows(
         requests: Optional[AsyncIterator[storage.AppendRowsRequest]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.AppendRowsResponse]]:
         r"""Appends data to the given stream.
@@ -492,7 +493,7 @@ async def get_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Gets information about a write stream.
@@ -605,7 +606,7 @@ async def finalize_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FinalizeWriteStreamResponse:
         r"""Finalize a write stream so that no new data can be appended to
@@ -716,7 +717,7 @@ async def batch_commit_write_streams(
         *,
         parent: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.BatchCommitWriteStreamsResponse:
         r"""Atomically commits a group of ``PENDING`` streams that belong to
@@ -833,7 +834,7 @@ async def flush_rows(
         *,
         write_stream: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FlushRowsResponse:
         r"""Flushes rows to a BUFFERED stream.
@@ -954,14 +955,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()


-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 __all__ = ("BigQueryWriteAsyncClient",)

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_write/client.py

Lines changed: 11 additions & 15 deletions
@@ -30,7 +30,8 @@
     Union,
     cast,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 from google.api_core import client_options as client_options_lib
 from google.api_core import exceptions as core_exceptions
@@ -477,7 +478,7 @@ def create_write_stream(
         parent: Optional[str] = None,
         write_stream: Optional[stream.WriteStream] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Creates a write stream to the given table. Additionally, every
@@ -593,7 +594,7 @@ def append_rows(
         requests: Optional[Iterator[storage.AppendRowsRequest]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Iterable[storage.AppendRowsResponse]:
         r"""Appends data to the given stream.
@@ -714,7 +715,7 @@ def get_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.WriteStream:
         r"""Gets information about a write stream.
@@ -817,7 +818,7 @@ def finalize_write_stream(
         *,
         name: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FinalizeWriteStreamResponse:
         r"""Finalize a write stream so that no new data can be appended to
@@ -918,7 +919,7 @@ def batch_commit_write_streams(
         *,
         parent: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.BatchCommitWriteStreamsResponse:
         r"""Atomically commits a group of ``PENDING`` streams that belong to
@@ -1027,7 +1028,7 @@ def flush_rows(
         *,
         write_stream: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.FlushRowsResponse:
         r"""Flushes rows to a BUFFERED stream.
@@ -1145,14 +1146,9 @@ def __exit__(self, type, value, traceback):
         self.transport.close()


-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 __all__ = ("BigQueryWriteClient",)

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py

Lines changed: 5 additions & 9 deletions
@@ -15,7 +15,8 @@
 #
 import abc
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1 import gapic_version as package_version

 import google.auth  # type: ignore
 import google.api_core
@@ -28,14 +29,9 @@
 from google.cloud.bigquery_storage_v1.types import storage
 from google.cloud.bigquery_storage_v1.types import stream

-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 class BigQueryWriteTransport(abc.ABC):
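Inside a transport, each RPC gets wrapped so that the DEFAULT sentinels seen earlier resolve to per-method retry and timeout policies, with DEFAULT_CLIENT_INFO supplying the call metadata. A small sketch of that wiring with a stand-in callable (fake_rpc, the retry numbers, and the 600-second timeout are all illustrative):

from google.api_core import gapic_v1
from google.api_core import retry as retries

def fake_rpc(request, timeout=None, metadata=()):
    # Stand-in for a real gRPC stub method.
    return "ok"

wrapped = gapic_v1.method.wrap_method(
    fake_rpc,
    default_retry=retries.Retry(initial=0.1, maximum=60.0, multiplier=1.3),
    default_timeout=600.0,
    client_info=gapic_v1.client_info.ClientInfo(gapic_version="2.17.0"),  # hypothetical
)

# Leaving retry/timeout at DEFAULT applies the configured policies above.
print(wrapped(request=None))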

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py

Lines changed: 8 additions & 12 deletions
@@ -29,7 +29,8 @@
     Type,
     Union,
 )
-import pkg_resources
+
+from google.cloud.bigquery_storage_v1beta2 import gapic_version as package_version

 from google.api_core.client_options import ClientOptions
 from google.api_core import exceptions as core_exceptions
@@ -229,7 +230,7 @@ async def create_read_session(
         read_session: Optional[stream.ReadSession] = None,
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
@@ -392,7 +393,7 @@ def read_rows(
         read_stream: Optional[str] = None,
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]:
         r"""Reads rows from the stream in the format prescribed
@@ -521,7 +522,7 @@ async def split_read_stream(
         request: Optional[Union[storage.SplitReadStreamRequest, dict]] = None,
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Optional[float] = None,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
@@ -623,14 +624,9 @@ async def __aexit__(self, exc_type, exc, tb):
         await self.transport.close()


-try:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution(
-            "google-cloud-bigquery-storage",
-        ).version,
-    )
-except pkg_resources.DistributionNotFound:
-    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)


 __all__ = ("BigQueryReadAsyncClient",)
