Skip to content

Commit

Permalink
Fix Failing test TestAccSecurityCenterV2ProjectBigQueryExportConfig_basic + others (GoogleCloudPlatform#12334)
Browse files Browse the repository at this point in the history
  • Loading branch information
vijaykanthm authored and amanMahendroo committed Dec 17, 2024
1 parent d16c33e commit 642635a
Show file tree
Hide file tree
Showing 5 changed files with 96 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ func TestAccSecurityCenterOrganizationBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
Expand Down Expand Up @@ -65,6 +65,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -77,7 +78,7 @@ resource "google_bigquery_dataset" "default" {
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
}
resource "google_scc_organization_scc_big_query_export" "default" {
Expand All @@ -90,6 +91,10 @@ resource "google_scc_organization_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_organization_scc_big_query_export.default]
}
`, context)
}

Expand All @@ -103,6 +108,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -113,12 +119,24 @@ resource "google_bigquery_dataset" "default" {
}
}
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}
resource "google_scc_organization_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
organization = "%{org_id}"
dataset = google_bigquery_dataset.default.id
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_organization_scc_big_query_export.default]
}
`, context)
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ func TestAccSecurityCenterProjectBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"project": envvar.GetTestProjectFromEnv(),
}
Expand Down Expand Up @@ -63,6 +63,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -75,7 +76,7 @@ resource "google_bigquery_dataset" "default" {
resource "time_sleep" "wait_x_minutes" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
Expand All @@ -91,6 +92,11 @@ resource "google_scc_project_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_x_minutes]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_project_scc_big_query_export.default]
}
`, context)
}

Expand All @@ -104,6 +110,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -114,12 +121,27 @@ resource "google_bigquery_dataset" "default" {
}
}
resource "time_sleep" "wait_x_minutes" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}
resource "google_scc_project_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
project = "%{project}"
dataset = google_bigquery_dataset.default.id
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""
depends_on = [time_sleep.wait_x_minutes]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_project_scc_big_query_export.default]
}
`, context)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ func TestAccSecurityCenterV2OrganizationBigQueryExportConfig_basic(t *testing.T)
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/locations/global/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
Expand Down Expand Up @@ -94,7 +94,7 @@ resource "google_scc_v2_organization_scc_big_query_export" "default" {
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_export.default]
}
`, context)
Expand All @@ -121,6 +121,11 @@ resource "google_bigquery_dataset" "default" {
}
}
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}
resource "google_scc_v2_organization_scc_big_query_export" "default" {
name = "%{name}"
big_query_export_id = "%{big_query_export_id}"
Expand All @@ -129,10 +134,12 @@ resource "google_scc_v2_organization_scc_big_query_export" "default" {
location = "global"
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_export.default]
}
`, context)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ func TestAccSecurityCenterV2OrganizationBigQueryExportsConfig_basic(t *testing.T
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("organizations/%s/locations/global/bigQueryExports/%s",
orgID, "tf-test-export-"+randomSuffix),
Expand Down Expand Up @@ -94,7 +94,7 @@ resource "google_scc_v2_organization_scc_big_query_exports" "default" {
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_exports.default]
}
`, context)
Expand All @@ -121,6 +121,11 @@ resource "google_bigquery_dataset" "default" {
}
}
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
}
resource "google_scc_v2_organization_scc_big_query_exports" "default" {
name = "%{name}"
big_query_export_id = "%{big_query_export_id}"
Expand All @@ -129,10 +134,12 @@ resource "google_scc_v2_organization_scc_big_query_exports" "default" {
location = "global"
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "3m"
create_duration = "6m"
depends_on = [google_scc_v2_organization_scc_big_query_exports.default]
}
`, context)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ func TestAccSecurityCenterV2ProjectBigQueryExportConfig_basic(t *testing.T) {
t.Parallel()

randomSuffix := acctest.RandString(t, 10)
dataset_id := "tf_test_" + randomSuffix
datasetID := "tf_test_" + randomSuffix
orgID := envvar.GetTestOrgFromEnv(t)

context := map[string]interface{}{
"org_id": orgID,
"random_suffix": randomSuffix,
"dataset_id": dataset_id,
"dataset_id": datasetID,
"big_query_export_id": "tf-test-export-" + randomSuffix,
"name": fmt.Sprintf("projects/%s/locations/global/bigQueryExports/%s",
envvar.GetTestProjectFromEnv(), "tf-test-export-"+randomSuffix),
Expand Down Expand Up @@ -66,6 +66,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -78,7 +79,10 @@ resource "google_bigquery_dataset" "default" {
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "3m"
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}
resource "google_scc_v2_project_scc_big_query_export" "default" {
Expand All @@ -92,6 +96,11 @@ resource "google_scc_v2_project_scc_big_query_export" "default" {
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_v2_project_scc_big_query_export.default]
}
`, context)
}

Expand All @@ -105,6 +114,7 @@ resource "google_bigquery_dataset" "default" {
location = "US"
default_table_expiration_ms = 3600000
default_partition_expiration_ms = null
delete_contents_on_destroy = true
labels = {
env = "default"
Expand All @@ -115,6 +125,14 @@ resource "google_bigquery_dataset" "default" {
}
}
resource "time_sleep" "wait_1_minute" {
depends_on = [google_bigquery_dataset.default]
create_duration = "6m"
# need to wait for destruction due to
# 'still in use' error from api
destroy_duration = "1m"
}
resource "google_scc_v2_project_scc_big_query_export" "default" {
big_query_export_id = "%{big_query_export_id}"
project = "%{project}"
Expand All @@ -123,6 +141,13 @@ resource "google_scc_v2_project_scc_big_query_export" "default" {
description = "SCC Findings Big Query Export Update"
filter = "state=\"ACTIVE\" AND NOT mute=\"MUTED\""
depends_on = [time_sleep.wait_1_minute]
}
resource "time_sleep" "wait_for_cleanup" {
create_duration = "6m"
depends_on = [google_scc_v2_project_scc_big_query_export.default]
}
`, context)
Expand Down

0 comments on commit 642635a

Please sign in to comment.