Skip to content

Commit

Permalink
Remove unnecessary update API call for client-side-only field changes…
Browse files Browse the repository at this point in the history
… in google_bigquery_table (GoogleCloudPlatform#12174)
  • Loading branch information
wj-chen authored Oct 30, 2024
1 parent 3cca7aa commit 5b23601
Show file tree
Hide file tree
Showing 2 changed files with 163 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -1903,6 +1903,19 @@ type TableReference struct {
}

func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error {
// If only client-side fields were modified, short-circuit the Update function to avoid sending an update API request.
clientSideFields := map[string]bool{"deletion_protection": true}
clientSideOnly := true
for field := range ResourceBigQueryTable().Schema {
if d.HasChange(field) && !clientSideFields[field] {
clientSideOnly = false
break
}
}
if clientSideOnly {
return resourceBigQueryTableRead(d, meta)
}

config := meta.(*transport_tpg.Config)
userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent)
if err != nil {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,72 @@ func TestAccBigQueryTable_Basic(t *testing.T) {
})
}

// TestAccBigQueryTable_OnlyDeletionProtectionUpdate exercises an update where
// the only changed field is the client-side deletion_protection flag, which
// the resource's Update function short-circuits without an API call.
func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))

	// Import/verify step repeated after each apply; deletion_protection is
	// client-side only, so it is excluded from import verification.
	verifyImport := resource.TestStep{
		ResourceName:            "google_bigquery_table.test",
		ImportState:             true,
		ImportStateVerify:       true,
		ImportStateVerifyIgnore: []string{"deletion_protection"},
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps: []resource.TestStep{
			// Create the table with deletion_protection enabled.
			{Config: testAccBigQueryTableBasicSchemaWithDeletionProtection(datasetID, tableID)},
			verifyImport,
			// Re-apply an otherwise identical config without the flag.
			{Config: testAccBigQueryTableBasicSchema(datasetID, tableID)},
			verifyImport,
		},
	})
}

// TestAccBigQueryTable_OnlyNestedFieldUpdate exercises an update that changes
// only a nested field (time_partitioning.expiration_ms), confirming that
// server-side nested changes still go through the normal update path.
func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))

	// Import/verify step repeated after each apply; deletion_protection is
	// client-side only, so it is excluded from import verification.
	verifyImport := resource.TestStep{
		ResourceName:            "google_bigquery_table.test",
		ImportState:             true,
		ImportStateVerify:       true,
		ImportStateVerifyIgnore: []string{"deletion_protection"},
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps: []resource.TestStep{
			// Create with a 1s partition expiration.
			{Config: testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID, 1000)},
			verifyImport,
			// Update only the nested expiration_ms value.
			{Config: testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID, 2000)},
			verifyImport,
		},
	})
}

func TestAccBigQueryTable_DropColumns(t *testing.T) {
t.Parallel()

Expand Down Expand Up @@ -1818,6 +1884,30 @@ EOH
`, datasetID, tableID)
}

// testAccBigQueryTableBasicSchemaWithDeletionProtection renders a minimal
// dataset + table config with deletion_protection explicitly set to true.
// It is the counterpart of testAccBigQueryTableBasicSchema (which omits the
// flag) for testing a deletion_protection-only diff.
func testAccBigQueryTableBasicSchemaWithDeletionProtection(datasetID, tableID string) string {
	// %s placeholders: dataset ID, then table ID.
	const tmpl = `
resource "google_bigquery_dataset" "test" {
  dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
  deletion_protection = true
  table_id   = "%s"
  dataset_id = google_bigquery_dataset.test.dataset_id
  schema = <<EOH
[
  {
    "name": "id",
    "type": "INTEGER"
  }
]
EOH
}
`
	return fmt.Sprintf(tmpl, datasetID, tableID)
}

func testAccBigQueryTableBasicSchemaWithPolicyTags(datasetID, tableID, projectID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down Expand Up @@ -1988,6 +2078,65 @@ EOH
`, datasetID, tableID, partitioningType)
}

// testAccBigQueryTableTimePartitioningWithExpirationMs renders a dataset +
// table config with DAY time-partitioning on the "ts" field, clustering, a
// nested RECORD schema, and a caller-supplied partition expiration so tests
// can diff only the nested expiration_ms value between applies.
func testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID string, expirationMs int) string {
	// Placeholders: dataset ID (%s), table ID (%s), expiration in ms (%d).
	const tmpl = `
resource "google_bigquery_dataset" "test" {
  dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
  deletion_protection = false
  table_id   = "%s"
  dataset_id = google_bigquery_dataset.test.dataset_id
  time_partitioning {
    type          = "DAY"
    field         = "ts"
    expiration_ms = %d
  }
  require_partition_filter = true
  clustering = ["some_int", "some_string"]
  schema = <<EOH
[
  {
    "name": "ts",
    "type": "TIMESTAMP"
  },
  {
    "name": "some_string",
    "type": "STRING"
  },
  {
    "name": "some_int",
    "type": "INTEGER"
  },
  {
    "name": "city",
    "type": "RECORD",
    "fields": [
      {
        "name": "id",
        "type": "INTEGER"
      },
      {
        "name": "coord",
        "type": "RECORD",
        "fields": [
          {
            "name": "lon",
            "type": "FLOAT"
          }
        ]
      }
    ]
  }
]
EOH
}
`
	return fmt.Sprintf(tmpl, datasetID, tableID, expirationMs)
}

func testAccBigQueryTableTimePartitioningDropColumns(datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down Expand Up @@ -4018,7 +4167,7 @@ func testAccBigQueryTableTableConstraintsUpdate(projectID, datasetID, tableID_pk
}
resource "google_bigquery_table" "table_pk" {
deletion_protection = false
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.foo.dataset_id
Expand Down

0 comments on commit 5b23601

Please sign in to comment.