Skip to content

Commit f5acf8e

Browse files
authored
fix: rolling back PHS creation in deployment (#105)
1 parent e854da8 commit f5acf8e

3 files changed

Lines changed: 2 additions & 39 deletions

File tree

dataproc.tf

Lines changed: 0 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -103,30 +103,3 @@ resource "google_project_iam_member" "bq_connection_iam_biglake" {
103103
role = "roles/biglake.admin"
104104
member = "serviceAccount:${google_bigquery_connection.ds_connection.cloud_resource[0].service_account_id}"
105105
}
106-
107-
resource "google_dataproc_cluster" "phs" {
108-
name = "gcp-${var.use_case_short}-phs-${random_id.id.hex}"
109-
project = module.project-services.project_id
110-
region = var.region
111-
cluster_config {
112-
staging_bucket = google_storage_bucket.phs-staging-bucket.name
113-
temp_bucket = google_storage_bucket.phs-temp-bucket.name
114-
gce_cluster_config {
115-
service_account = google_service_account.dataproc_service_account.email
116-
subnetwork = google_compute_subnetwork.subnet.name
117-
}
118-
software_config {
119-
override_properties = {
120-
"dataproc:dataproc.allow.zero.workers" = "true"
121-
"spark:spark.history.fs.logDirectory" = "gs://${google_storage_bucket.spark-log-directory.name}/phs/*/spark-job-history"
122-
}
123-
}
124-
endpoint_config {
125-
enable_http_port_access = "true"
126-
}
127-
}
128-
129-
depends_on = [
130-
google_project_iam_member.dataproc_sa_roles
131-
]
132-
}

test/integration/analytics_lakehouse/analytics_lakehouse_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,8 +47,8 @@ func TestAnalyticsLakehouse(t *testing.T) {
4747

4848
verifyNoVMs := func() (bool, error) {
4949
currentComputeInstances := gcloud.Runf(t, "compute instances list --project %s", projectID).Array()
50-
// There should only be 1 compute instance (Dataproc PHS). Wait to destroy if other instances exist.
51-
if len(currentComputeInstances) > 1 {
50+
// If compute instances is greater than 0, wait and check again until 0 to complete destroy
51+
if len(currentComputeInstances) > 0 {
5252
return true, nil
5353
}
5454
return false, nil

workflows.tf

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -170,13 +170,3 @@ resource "time_sleep" "wait_after_all_workflows" {
170170
data.http.call_workflows_project_setup,
171171
]
172172
}
173-
174-
# Stop the PHS cluster after creation since it costs too much.
175-
# tflint-ignore: terraform_unused_declarations
176-
data "http" "call_stop_cluster" {
177-
url = "https://dataproc.googleapis.com/v1/projects/${module.project-services.project_id}/regions/${var.region}/clusters/${google_dataproc_cluster.phs.name}:stop"
178-
method = "POST"
179-
request_headers = {
180-
Accept = "application/json"
181-
Authorization = "Bearer ${data.google_client_config.current.access_token}" }
182-
}

0 commit comments

Comments (0)