Skip to content

Commit e7ef590

Browse files
authored
Add BigTable source format in BigQuery tables (#4155)
* Add BigTable source format in BigQuery * Add tests * Fix test config typos * Add bigtable external source in the bigquery tests * typo * Shrink resource name * Fix master merge * Ignore deletion_protection in TestAccBigQueryDataTable_bigtable
1 parent 8728bc8 commit e7ef590

File tree

4 files changed

+78
-4
lines changed

4 files changed

+78
-4
lines changed

mmv1/products/bigquery/api.yaml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -725,7 +725,8 @@ objects:
725725
description: |
726726
The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP".
727727
For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET".
728-
For orc, specify "ORC". The default value is CSV.
728+
For orc, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE".
729+
The default value is CSV.
729730
default_value: 'CSV'
730731
- !ruby/object:Api::Type::Boolean
731732
name: 'allowJaggedRows'

mmv1/third_party/terraform/resources/resource_bigquery_table.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -389,7 +389,7 @@ func resourceBigQueryTable() *schema.Resource {
389389
Required: true,
390390
Description: `The data format. Supported values are: "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC" and "DATASTORE_BACKUP". To use "GOOGLE_SHEETS" the scopes must include "googleapis.com/auth/drive.readonly".`,
391391
ValidateFunc: validation.StringInSlice([]string{
392-
"CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "DATASTORE_BACKUP", "PARQUET", "ORC",
392+
"CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "DATASTORE_BACKUP", "PARQUET", "ORC", "BIGTABLE",
393393
}, false),
394394
},
395395
// SourceURIs [Required] The fully-qualified URIs that point to your data in Google Cloud.

mmv1/third_party/terraform/tests/resource_bigquery_table_test.go

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -425,6 +425,32 @@ func TestAccBigQueryExternalDataTable_CSV(t *testing.T) {
425425
})
426426
}
427427

428+
func TestAccBigQueryDataTable_bigtable(t *testing.T) {
429+
t.Parallel()
430+
431+
context := map[string]interface{}{
432+
"random_suffix": randString(t, 8),
433+
"project": getTestProjectFromEnv(),
434+
}
435+
436+
vcrTest(t, resource.TestCase{
437+
PreCheck: func() { testAccPreCheck(t) },
438+
Providers: testAccProviders,
439+
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
440+
Steps: []resource.TestStep{
441+
{
442+
Config: testAccBigQueryTableFromBigtable(context),
443+
},
444+
{
445+
ResourceName: "google_bigquery_table.table",
446+
ImportState: true,
447+
ImportStateVerify: true,
448+
ImportStateVerifyIgnore: []string{"deletion_protection"},
449+
},
450+
},
451+
})
452+
}
453+
428454
func TestAccBigQueryDataTable_sheet(t *testing.T) {
429455
t.Parallel()
430456

@@ -1406,6 +1432,53 @@ resource "google_bigquery_table" "test" {
14061432
`, datasetID, bucketName, objectName, content, tableID, format, quoteChar)
14071433
}
14081434

1435+
func testAccBigQueryTableFromBigtable(context map[string]interface{}) string {
1436+
return Nprintf(`
1437+
resource "google_bigtable_instance" "instance" {
1438+
name = "tf-test-bigtable-inst-%{random_suffix}"
1439+
cluster {
1440+
cluster_id = "tf-test-bigtable-%{random_suffix}"
1441+
zone = "us-central1-b"
1442+
}
1443+
instance_type = "DEVELOPMENT"
1444+
deletion_protection = false
1445+
}
1446+
resource "google_bigtable_table" "table" {
1447+
name = "%{random_suffix}"
1448+
instance_name = google_bigtable_instance.instance.name
1449+
column_family {
1450+
family = "cf-%{random_suffix}-first"
1451+
}
1452+
column_family {
1453+
family = "cf-%{random_suffix}-second"
1454+
}
1455+
}
1456+
resource "google_bigquery_table" "table" {
1457+
deletion_protection = false
1458+
dataset_id = google_bigquery_dataset.dataset.dataset_id
1459+
table_id = "tf_test_bigtable_%{random_suffix}"
1460+
external_data_configuration {
1461+
autodetect = true
1462+
source_format = "BIGTABLE"
1463+
ignore_unknown_values = true
1464+
source_uris = [
1465+
"https://googleapis.com/bigtable/${google_bigtable_table.table.id}",
1466+
]
1467+
}
1468+
}
1469+
resource "google_bigquery_dataset" "dataset" {
1470+
dataset_id = "tf_test_ds_%{random_suffix}"
1471+
friendly_name = "test"
1472+
description = "This is a test description"
1473+
location = "EU"
1474+
default_table_expiration_ms = 3600000
1475+
labels = {
1476+
env = "default"
1477+
}
1478+
}
1479+
`, context)
1480+
}
1481+
14091482
func testAccBigQueryTableFromSheet(context map[string]interface{}) string {
14101483
return Nprintf(`
14111484
resource "google_bigquery_table" "table" {

mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -186,8 +186,8 @@ The `external_data_configuration` block supports:
186186
`google_bigquery_table.schema`
187187

188188
* `source_format` (Required) - The data format. Supported values are:
189-
"CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC"
190-
and "DATASTORE_BACKUP". To use "GOOGLE_SHEETS"
189+
"CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC",
190+
"DATASTORE_BACKUP", and "BIGTABLE". To use "GOOGLE_SHEETS"
191191
the `scopes` must include
192192
"https://www.googleapis.com/auth/drive.readonly".
193193

0 commit comments

Comments
 (0)