Skip to content

Commit c474b66

Browse files
davenportjw and Steve
committed
fix: Lakehouse cleanup (#9)
Co-authored-by: Steve <steveswalker@google.com>
1 parent c86c4a4 commit c474b66

2 files changed

Lines changed: 40 additions & 125 deletions

File tree

bigquery.tf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,4 +36,4 @@ resource "google_bigquery_routine" "create_view_ecommerce" {
3636
routine_type = "PROCEDURE"
3737
language = "SQL"
3838
definition_body = file("${path.module}/assets/sql/view_ecommerce.sql")
39-
}
39+
}

main.tf

Lines changed: 39 additions & 124 deletions
Original file line numberDiff line numberDiff line change
@@ -84,123 +84,9 @@ data "google_storage_project_service_account" "gcs_account" {
8484
project = module.project-services.project_id
8585
}
8686

87-
88-
#give workflows_sa bq data access
89-
resource "google_project_iam_member" "workflows_sa_bq_connection" {
90-
project = module.project-services.project_id
91-
role = "roles/bigquery.connectionAdmin"
92-
member = "serviceAccount:${google_service_account.workflows_sa.email}"
93-
94-
depends_on = [
95-
google_service_account.workflows_sa
96-
]
97-
}
98-
99-
# Set up BigQuery resources
100-
# # Create the BigQuery dataset
101-
resource "google_bigquery_dataset" "gcp_lakehouse_ds" {
102-
project = module.project-services.project_id
103-
dataset_id = "gcp_lakehouse_ds"
104-
friendly_name = "My gcp_lakehouse Dataset"
105-
description = "My gcp_lakehouse Dataset with tables"
106-
location = var.region
107-
labels = var.labels
108-
depends_on = [time_sleep.wait_after_adding_eventarc_svc_agent]
109-
}
110-
111-
112-
resource "google_bigquery_routine" "create_view_ecommerce" {
113-
project = module.project-services.project_id
114-
dataset_id = google_bigquery_dataset.gcp_lakehouse_ds.dataset_id
115-
routine_id = "create_view_ecommerce"
116-
routine_type = "PROCEDURE"
117-
language = "SQL"
118-
definition_body = file("${path.module}/assets/sql/view_ecommerce.sql")
119-
}
120-
121-
# # Create a BigQuery connection
122-
resource "google_bigquery_connection" "gcp_lakehouse_connection" {
123-
project = module.project-services.project_id
124-
connection_id = "gcp_lakehouse_connection"
125-
location = var.region
126-
friendly_name = "gcp lakehouse storage bucket connection"
127-
cloud_resource {}
128-
depends_on = [time_sleep.wait_after_adding_eventarc_svc_agent]
129-
}
130-
131-
132-
133-
## This grants permissions to the service account of the connection created in the last step.
134-
resource "google_project_iam_member" "connectionPermissionGrant" {
135-
project = module.project-services.project_id
136-
role = "roles/storage.objectViewer"
137-
member = format("serviceAccount:%s", google_bigquery_connection.gcp_lakehouse_connection.cloud_resource[0].service_account_id)
138-
}
139-
140-
#set up workflows svg acct
141-
resource "google_service_account" "workflows_sa" {
142-
project = module.project-services.project_id
143-
account_id = "workflows-sa"
144-
display_name = "Workflows Service Account"
145-
}
146-
147-
#give workflows_sa bq access
148-
resource "google_project_iam_member" "workflows_sa_bq_read" {
149-
project = module.project-services.project_id
150-
role = "roles/bigquery.jobUser"
151-
member = "serviceAccount:${google_service_account.workflows_sa.email}"
152-
153-
depends_on = [
154-
google_service_account.workflows_sa
155-
]
156-
}
157-
158-
resource "google_project_iam_member" "workflows_sa_log_writer" {
159-
project = module.project-services.project_id
160-
role = "roles/logging.logWriter"
161-
member = "serviceAccount:${google_service_account.workflows_sa.email}"
162-
163-
depends_on = [
164-
google_service_account.workflows_sa
165-
]
166-
}
167-
168-
169-
resource "google_workflows_workflow" "workflow_bqml" {
170-
name = "workflow-bqml-create"
171-
project = module.project-services.project_id
172-
region = "us-central1"
173-
description = "Create BQML Model"
174-
service_account = google_service_account.workflows_sa.email
175-
source_contents = file("${path.module}/assets/yaml/workflow_bqml.yaml")
176-
depends_on = [google_project_iam_member.workflows_sa_bq_read]
177-
178-
179-
}
180-
181-
resource "google_workflows_workflow" "workflow_bucket_copy" {
182-
name = "workflow-bucket-copy"
183-
project = module.project-services.project_id
184-
region = "us-central1"
185-
description = "Copy data files from public bucket to solution project"
186-
service_account = google_service_account.workflows_sa.email
187-
source_contents = file("${path.module}/assets/yaml/bucket_copy.yaml")
188-
depends_on = [google_project_iam_member.workflows_sa_bq_read]
189-
190-
191-
}
192-
193-
resource "google_workflows_workflow" "workflows_create_gcp_biglake_tables" {
194-
name = "workflow-create-gcp-biglake-tables"
195-
project = module.project-services.project_id
196-
region = "us-central1"
197-
description = "create gcp biglake tables_18"
198-
service_account = google_service_account.workflows_sa.email
199-
source_contents = templatefile("${path.module}/assets/yaml/workflow_create_gcp_lakehouse_tables.yaml", {
200-
data_analyst_user = google_service_account.data_analyst_user.email,
201-
marketing_user = google_service_account.marketing_user.email
202-
})
203-
87+
#random id
88+
resource "random_id" "id" {
89+
byte_length = 4
20490
}
20591

20692
# # Set up the provisioning bucketstorage bucket
@@ -282,15 +168,44 @@ data "http" "call_workflows_create_gcp_biglake_tables_run" {
282168
Accept = "application/json"
283169
Authorization = "Bearer ${data.google_client_config.current.access_token}" }
284170
depends_on = [
285-
google_workflows_workflow.workflow,
286-
google_workflows_workflow.workflow_bqml,
287-
google_workflows_workflow.workflow_create_gcp_lakehouse_tables,
288-
google_workflows_workflow.workflow_bucket_copy,
289-
]
171+
module.project-services,
172+
google_storage_bucket.provisioning_bucket,
173+
google_storage_bucket.destination_bucket,
174+
google_project_service_identity.workflows,
175+
google_service_account.workflows_sa,
176+
google_project_iam_member.workflow_service_account_invoke_role,
177+
google_project_iam_member.workflows_sa_bq_data,
178+
google_project_iam_member.workflows_sa_gcs_admin,
179+
google_project_iam_member.workflows_sa_bq_resource_mgr,
180+
google_project_iam_member.workflow_service_account_token_role,
181+
google_project_iam_member.workflows_sa_bq_connection,
182+
google_project_iam_member.workflows_sa_bq_read,
183+
google_project_iam_member.workflows_sa_log_writer,
184+
google_project_iam_member.workflow_service_account_dataproc_role,
185+
google_project_iam_member.workflow_service_account_bqadmin,
186+
google_bigquery_dataset.gcp_lakehouse_ds,
187+
google_bigquery_connection.gcp_lakehouse_connection,
188+
google_project_iam_member.connectionPermissionGrant,
189+
google_workflows_workflow.workflows_create_gcp_biglake_tables,
190+
data.google_storage_project_service_account.gcs_account
191+
]
290192
}
291193

292-
data "http" "call_workflows_bucket_copy_run" {
293-
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow_bucket_copy.name}/executions"
194+
resource "time_sleep" "wait_after_all_workflows" {
195+
create_duration = "30s"
196+
depends_on = [data.http.call_workflows_bucket_copy_run,
197+
data.http.call_workflows_create_gcp_biglake_tables_run,
198+
data.http.call_workflows_create_iceberg_table,
199+
data.http.call_workflows_create_views_and_others
200+
]
201+
}
202+
#execute workflows
203+
data "google_client_config" "current" {
204+
}
205+
provider "http" {
206+
}
207+
data "http" "call_workflows_create_gcp_biglake_tables_run" {
208+
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflows_create_gcp_biglake_tables.name}/executions"
294209
method = "POST"
295210
request_headers = {
296211
Accept = "application/json"

0 commit comments

Comments (0)