Skip to content

Commit ce331bb

Browse files
Add Support for Azure Blob Storage Transfer (#4751) (#9311)
* mark field as updatable Co-authored-by: upodroid <cy@borg.dev> * add azure support Co-authored-by: upodroid <cy@borg.dev> * fix typo * change path options * revert doc change Signed-off-by: Modular Magician <magic-modules@google.com>
1 parent 85979d4 commit ce331bb

File tree

3 files changed

+123
-9
lines changed

3 files changed

+123
-9
lines changed

.changelog/4751.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
storage-transfer: added support for `azure_blob_storage_data_source` to `google_storage_transfer_job`
3+
```

google/resource_storage_transfer_job.go

Lines changed: 102 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ var (
3030
"transfer_spec.0.gcs_data_source",
3131
"transfer_spec.0.aws_s3_data_source",
3232
"transfer_spec.0.http_data_source",
33+
"transfer_spec.0.azure_blob_storage_data_source",
3334
}
3435
)
3536

@@ -99,7 +100,15 @@ func resourceStorageTransferJob() *schema.Resource {
99100
MaxItems: 1,
100101
Elem: httpDataSchema(),
101102
ExactlyOneOf: transferSpecDataSourceKeys,
102-
Description: `An HTTP URL data source.`,
103+
Description:  `An HTTP URL data source.`,
104+
},
105+
"azure_blob_storage_data_source": {
106+
Type: schema.TypeList,
107+
Optional: true,
108+
MaxItems: 1,
109+
Elem: azureBlobStorageDataSchema(),
110+
ExactlyOneOf: transferSpecDataSourceKeys,
111+
Description: `An Azure Blob Storage data source.`,
103112
},
104113
},
105114
},
@@ -370,6 +379,45 @@ func httpDataSchema() *schema.Resource {
370379
}
371380
}
372381

382+
func azureBlobStorageDataSchema() *schema.Resource {
383+
return &schema.Resource{
384+
Schema: map[string]*schema.Schema{
385+
"storage_account": {
386+
Required: true,
387+
Type: schema.TypeString,
388+
Description: `The name of the Azure Storage account.`,
389+
},
390+
"container": {
391+
Required: true,
392+
Type: schema.TypeString,
393+
Description: `The container to transfer from the Azure Storage account.`,
394+
},
395+
"path": {
396+
Optional: true,
397+
Computed: true,
398+
Type: schema.TypeString,
399+
Description: `Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.`,
400+
},
401+
"azure_credentials": {
402+
Type: schema.TypeList,
403+
Required: true,
404+
MaxItems: 1,
405+
Elem: &schema.Resource{
406+
Schema: map[string]*schema.Schema{
407+
"sas_token": {
408+
Type: schema.TypeString,
409+
Required: true,
410+
Sensitive: true,
411+
Description: `Azure shared access signature.`,
412+
},
413+
},
414+
},
415+
Description: ` Credentials used to authenticate API requests to Azure.`,
416+
},
417+
},
418+
}
419+
}
420+
373421
func diffSuppressEmptyStartTimeOfDay(k, old, new string, d *schema.ResourceData) bool {
374422
return k == "schedule.0.start_time_of_day.#" && old == "1" && new == "0"
375423
}
@@ -769,6 +817,50 @@ func flattenHttpData(httpData *storagetransfer.HttpData) []map[string]interface{
769817
return []map[string]interface{}{data}
770818
}
771819

820+
func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.AzureCredentials {
821+
if len(azureCredentials) == 0 || azureCredentials[0] == nil {
822+
return nil
823+
}
824+
825+
azureCredential := azureCredentials[0].(map[string]interface{})
826+
return &storagetransfer.AzureCredentials{
827+
SasToken: azureCredential["sas_token"].(string),
828+
}
829+
}
830+
831+
func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} {
832+
data := map[string]interface{}{
833+
"sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"),
834+
}
835+
836+
return []map[string]interface{}{data}
837+
}
838+
839+
func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetransfer.AzureBlobStorageData {
840+
if len(azureBlobStorageDatas) == 0 || azureBlobStorageDatas[0] == nil {
841+
return nil
842+
}
843+
844+
azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{})
845+
return &storagetransfer.AzureBlobStorageData{
846+
Container: azureBlobStorageData["container"].(string),
847+
Path: azureBlobStorageData["path"].(string),
848+
StorageAccount: azureBlobStorageData["storage_account"].(string),
849+
AzureCredentials: expandAzureCredentials(azureBlobStorageData["sas_token"].([]interface{})),
850+
}
851+
}
852+
853+
func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} {
854+
data := map[string]interface{}{
855+
"container": azureBlobStorageData.Container,
856+
"path": azureBlobStorageData.Path,
857+
"storage_account": azureBlobStorageData.StorageAccount,
858+
"azure_credentials": flattenAzureCredentials(d),
859+
}
860+
861+
return []map[string]interface{}{data}
862+
}
863+
772864
func expandObjectConditions(conditions []interface{}) *storagetransfer.ObjectConditions {
773865
if len(conditions) == 0 || conditions[0] == nil {
774866
return nil
@@ -823,12 +915,13 @@ func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferS
823915

824916
transferSpec := transferSpecs[0].(map[string]interface{})
825917
return &storagetransfer.TransferSpec{
826-
GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
827-
ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
828-
TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
829-
GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
830-
AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
831-
HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})),
918+
GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
919+
ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
920+
TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
921+
GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
922+
AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
923+
HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})),
924+
AzureBlobStorageDataSource: expandAzureBlobStorageData(transferSpec["azure_blob_storage_data_source"].([]interface{})),
832925
}
833926
}
834927

@@ -850,6 +943,8 @@ func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.R
850943
data["aws_s3_data_source"] = flattenAwsS3Data(transferSpec.AwsS3DataSource, d)
851944
} else if transferSpec.HttpDataSource != nil {
852945
data["http_data_source"] = flattenHttpData(transferSpec.HttpDataSource)
946+
} else if transferSpec.AzureBlobStorageDataSource != nil {
947+
data["azure_blob_storage_data_source"] = flattenAzureBlobStorageData(transferSpec.AzureBlobStorageDataSource, d)
853948
}
854949

855950
return []map[string][]map[string]interface{}{data}

website/docs/r/storage_transfer_job.html.markdown

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ Creates a new Transfer Job in Google Cloud Storage Transfer.
1414
To get more information about Google Cloud Storage Transfer, see:
1515

1616
* [Overview](https://cloud.google.com/storage-transfer/docs/overview)
17-
* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob)
17+
* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs)
1818
* How-to Guides
1919
* [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access)
2020

@@ -118,7 +118,9 @@ The `transfer_spec` block supports:
118118

119119
* `aws_s3_data_source` - (Optional) An AWS S3 data source. Structure documented below.
120120

121-
* `http_data_source` - (Optional) An HTTP URL data source. Structure documented below.
121+
* `http_data_source` - (Optional) An HTTP URL data source. Structure documented below.
122+
123+
* `azure_blob_storage_data_source` - (Optional) An Azure Blob Storage data source. Structure documented below.
122124

123125
The `schedule` block supports:
124126

@@ -172,6 +174,20 @@ The `http_data_source` block supports:
172174

173175
* `list_url` - (Required) The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
174176

177+
The `azure_blob_storage_data_source` block supports:
178+
179+
* `storage_account` - (Required) The name of the Azure Storage account.
180+
181+
* `container` - (Required) The container to transfer from the Azure Storage account.
182+
183+
* `path` - (Optional) Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
184+
185+
* `azure_credentials` - (Required) Credentials used to authenticate API requests to Azure. Structure documented below.
186+
187+
The `azure_credentials` block supports:
188+
189+
* `sas_token` - (Required) Azure shared access signature. See [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview).
190+
175191
The `schedule_start_date` and `schedule_end_date` blocks support:
176192

177193
* `year` - (Required) Year of date. Must be from 1 to 9999.

0 commit comments

Comments
 (0)