@@ -30,6 +30,7 @@
 from google.cloud.bigtable.row_data import Cell
 from google.cloud.bigtable.row_data import PartialRowData
 from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from grpc._channel import _Rendezvous
 
 from retry import RetryErrors
 from retry import RetryResult
@@ -378,23 +379,22 @@ def test_read_large_cells_over_limit(self):
         row = self._table.row(ROW_KEY)
         self.rows_to_delete.append(row)
 
-        data = '1' * 51 * 1024 * 1024  # 51MB.
+        data = b'1' * 51 * 1024 * 1024  # 51MB.
         row.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, data)
         row.commit()
         row.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, data)
         row.commit()
 
-        with self.assertRaises(Exception):
+        with self.assertRaises(_Rendezvous):
            partial_row_data = self._table.read_row(ROW_KEY)
 
-
    def test_read_large_cell_over_limit(self):
        row = self._table.row(ROW_KEY)
        self.rows_to_delete.append(row)
 
-        data = '1' * 101 * 1024 * 1024  # 11MB of 1's.
+        data = b'1' * 101 * 1024 * 1024  # 101MB of 1's.
        row.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, data)
-        with self.assertRaises(Exception):
+        with self.assertRaises(_Rendezvous):
            row.commit()
 
    def test_read_row(self):
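
A note on the assertion type: _Rendezvous is a private class inside grpc._channel, and it derives from the public grpc.RpcError, so the same failure can be asserted without importing gRPC internals. A minimal sketch under that assumption, reusing the fixtures from the diff (self._table, ROW_KEY, COLUMN_FAMILY_ID1, COL_NAME1), not the change actually made in this commit:

    import grpc

    def test_read_large_cell_over_limit(self):
        row = self._table.row(ROW_KEY)
        self.rows_to_delete.append(row)

        # 101 MiB payload, well over Bigtable's per-cell size limit.
        data = b'1' * 101 * 1024 * 1024
        row.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, data)

        # grpc.RpcError is the public ancestor of _Rendezvous, so it
        # catches the same oversized-mutation failure.
        with self.assertRaises(grpc.RpcError):
            row.commit()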
|