Skip to content

Commit 538b863

Browse files
Added customizeDiff for a couple of subfields of params field in google_bigquery_data_transfer_config (#6784) (#13137)
* Added customizeDiff for the params field in google_bigquery_data_transfer_config
* Added test cases for the params field of google_bigquery_data_transfer_config
* Added unit tests to cover ForceNew behaviour of the params field
* Added handling for customizeDiff in google_bigquery_data_transfer_config
* Added comments for the paramsCustomizeDiff function in google_bigquery_data_transfer_config
* Added a test case for a different data_source_id in resource_bigquery_data_transfer_config
* Updated the test case for a different data_source_id in resource_bigquery_data_transfer_config
* Updated the error message in resource_bigquery_data_transfer_config_test.go
* Updated the test case in resource_bigquery_data_transfer_config_test.go

Signed-off-by: Modular Magician <[email protected]>
1 parent 50e1f7d commit 538b863

File tree

3 files changed

+243
-1
lines changed

3 files changed

+243
-1
lines changed

.changelog/6784.txt

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
bigquerydatatransfer: recreate `google_bigquery_data_transfer_config` for Cloud Storage transfers when immutable params `data_path_template` and `destination_table_name_template` are changed
3+
```

google/resource_bigquery_data_transfer_config.go

+36-1
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import (
2222
"strings"
2323
"time"
2424

25+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
2526
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
2627
)
2728

@@ -38,6 +39,40 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v
3839
return nil
3940
}
4041

42+
// This customizeDiff is to use ForceNew for params fields data_path_template and
43+
// destination_table_name_template only if the value of "data_source_id" is "google_cloud_storage".
44+
func paramsCustomizeDiffFunc(diff TerraformResourceDiff) error {
45+
old, new := diff.GetChange("params")
46+
dsId := diff.Get("data_source_id").(string)
47+
oldParams := old.(map[string]interface{})
48+
newParams := new.(map[string]interface{})
49+
var err error
50+
51+
if dsId == "google_cloud_storage" {
52+
if oldParams["data_path_template"] != nil && newParams["data_path_template"] != nil && oldParams["data_path_template"].(string) != newParams["data_path_template"].(string) {
53+
err = diff.ForceNew("params")
54+
if err != nil {
55+
return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
56+
}
57+
return nil
58+
}
59+
60+
if oldParams["destination_table_name_template"] != nil && newParams["destination_table_name_template"] != nil && oldParams["destination_table_name_template"].(string) != newParams["destination_table_name_template"].(string) {
61+
err = diff.ForceNew("params")
62+
if err != nil {
63+
return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
64+
}
65+
return nil
66+
}
67+
}
68+
69+
return nil
70+
}
71+
72+
func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error {
73+
return paramsCustomizeDiffFunc(diff)
74+
}
75+
4176
func resourceBigqueryDataTransferConfig() *schema.Resource {
4277
return &schema.Resource{
4378
Create: resourceBigqueryDataTransferConfigCreate,
@@ -55,7 +90,7 @@ func resourceBigqueryDataTransferConfig() *schema.Resource {
5590
Delete: schema.DefaultTimeout(20 * time.Minute),
5691
},
5792

58-
CustomizeDiff: sensitiveParamCustomizeDiff,
93+
CustomizeDiff: customdiff.All(sensitiveParamCustomizeDiff, paramsCustomizeDiff),
5994

6095
Schema: map[string]*schema.Schema{
6196
"data_source_id": {

google/resource_bigquery_data_transfer_config_test.go

+204
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,144 @@ import (
1010
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
1111
)
1212

13+
func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew(t *testing.T) {
14+
t.Parallel()
15+
16+
cases := map[string]struct {
17+
before map[string]interface{}
18+
after map[string]interface{}
19+
forcenew bool
20+
}{
21+
"changing_data_path_template": {
22+
before: map[string]interface{}{
23+
"data_source_id": "google_cloud_storage",
24+
"params": map[string]interface{}{
25+
"data_path_template": "gs://bq-bucket-temp/*.json",
26+
"destination_table_name_template": "table-old",
27+
"file_format": "JSON",
28+
"max_bad_records": 10,
29+
"write_disposition": "APPEND",
30+
},
31+
},
32+
after: map[string]interface{}{
33+
"data_source_id": "google_cloud_storage",
34+
"params": map[string]interface{}{
35+
"data_path_template": "gs://bq-bucket-temp-new/*.json",
36+
"destination_table_name_template": "table-old",
37+
"file_format": "JSON",
38+
"max_bad_records": 10,
39+
"write_disposition": "APPEND",
40+
},
41+
},
42+
forcenew: true,
43+
},
44+
"changing_destination_table_name_template": {
45+
before: map[string]interface{}{
46+
"data_source_id": "google_cloud_storage",
47+
"params": map[string]interface{}{
48+
"data_path_template": "gs://bq-bucket-temp/*.json",
49+
"destination_table_name_template": "table-old",
50+
"file_format": "JSON",
51+
"max_bad_records": 10,
52+
"write_disposition": "APPEND",
53+
},
54+
},
55+
after: map[string]interface{}{
56+
"data_source_id": "google_cloud_storage",
57+
"params": map[string]interface{}{
58+
"data_path_template": "gs://bq-bucket-temp/*.json",
59+
"destination_table_name_template": "table-new",
60+
"file_format": "JSON",
61+
"max_bad_records": 10,
62+
"write_disposition": "APPEND",
63+
},
64+
},
65+
forcenew: true,
66+
},
67+
"changing_non_force_new_fields": {
68+
before: map[string]interface{}{
69+
"data_source_id": "google_cloud_storage",
70+
"params": map[string]interface{}{
71+
"data_path_template": "gs://bq-bucket-temp/*.json",
72+
"destination_table_name_template": "table-old",
73+
"file_format": "JSON",
74+
"max_bad_records": 10,
75+
"write_disposition": "APPEND",
76+
},
77+
},
78+
after: map[string]interface{}{
79+
"data_source_id": "google_cloud_storage",
80+
"params": map[string]interface{}{
81+
"data_path_template": "gs://bq-bucket-temp/*.json",
82+
"destination_table_name_template": "table-old",
83+
"file_format": "JSON",
84+
"max_bad_records": 1000,
85+
"write_disposition": "APPEND",
86+
},
87+
},
88+
forcenew: false,
89+
},
90+
"changing_destination_table_name_template_for_different_data_source_id": {
91+
before: map[string]interface{}{
92+
"data_source_id": "scheduled_query",
93+
"params": map[string]interface{}{
94+
"destination_table_name_template": "table-old",
95+
"query": "SELECT 1 AS a",
96+
"write_disposition": "WRITE_APPEND",
97+
},
98+
},
99+
after: map[string]interface{}{
100+
"data_source_id": "scheduled_query",
101+
"params": map[string]interface{}{
102+
"destination_table_name_template": "table-new",
103+
"query": "SELECT 1 AS a",
104+
"write_disposition": "WRITE_APPEND",
105+
},
106+
},
107+
forcenew: false,
108+
},
109+
"changing_data_path_template_for_different_data_source_id": {
110+
before: map[string]interface{}{
111+
"data_source_id": "scheduled_query",
112+
"params": map[string]interface{}{
113+
"data_path_template": "gs://bq-bucket/*.json",
114+
"query": "SELECT 1 AS a",
115+
"write_disposition": "WRITE_APPEND",
116+
},
117+
},
118+
after: map[string]interface{}{
119+
"data_source_id": "scheduled_query",
120+
"params": map[string]interface{}{
121+
"data_path_template": "gs://bq-bucket-new/*.json",
122+
"query": "SELECT 1 AS a",
123+
"write_disposition": "WRITE_APPEND",
124+
},
125+
},
126+
forcenew: false,
127+
},
128+
}
129+
130+
for tn, tc := range cases {
131+
d := &ResourceDiffMock{
132+
Before: map[string]interface{}{
133+
"params": tc.before["params"],
134+
"data_source_id": tc.before["data_source_id"],
135+
},
136+
After: map[string]interface{}{
137+
"params": tc.after["params"],
138+
"data_source_id": tc.after["data_source_id"],
139+
},
140+
}
141+
err := paramsCustomizeDiffFunc(d)
142+
if err != nil {
143+
t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn)
144+
}
145+
if d.IsForceNew != tc.forcenew {
146+
t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew)
147+
}
148+
}
149+
}
150+
13151
// The service account TF uses needs the permission granted in the configs
14152
// but it will get deleted by parallel tests, so they need to be run serially.
15153
func TestAccBigqueryDataTransferConfig(t *testing.T) {
@@ -19,6 +157,7 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {
19157
"service_account": testAccBigqueryDataTransferConfig_scheduledQuery_with_service_account,
20158
"no_destintation": testAccBigqueryDataTransferConfig_scheduledQuery_no_destination,
21159
"booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam,
160+
"update_params": testAccBigqueryDataTransferConfig_force_new_update_params,
22161
}
23162

24163
for name, tc := range testCases {
@@ -168,6 +307,45 @@ func testAccBigqueryDataTransferConfig_copy_booleanParam(t *testing.T) {
168307
})
169308
}
170309

310+
func testAccBigqueryDataTransferConfig_force_new_update_params(t *testing.T) {
311+
random_suffix := randString(t, 10)
312+
313+
vcrTest(t, resource.TestCase{
314+
PreCheck: func() { testAccPreCheck(t) },
315+
Providers: testAccProviders,
316+
CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
317+
Steps: []resource.TestStep{
318+
{
319+
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "old", "old"),
320+
},
321+
{
322+
ResourceName: "google_bigquery_data_transfer_config.update_config",
323+
ImportState: true,
324+
ImportStateVerify: true,
325+
ImportStateVerifyIgnore: []string{"location"},
326+
},
327+
{
328+
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "old"),
329+
},
330+
{
331+
ResourceName: "google_bigquery_data_transfer_config.update_config",
332+
ImportState: true,
333+
ImportStateVerify: true,
334+
ImportStateVerifyIgnore: []string{"location"},
335+
},
336+
{
337+
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "new"),
338+
},
339+
{
340+
ResourceName: "google_bigquery_data_transfer_config.update_config",
341+
ImportState: true,
342+
ImportStateVerify: true,
343+
ImportStateVerifyIgnore: []string{"location"},
344+
},
345+
},
346+
})
347+
}
348+
171349
func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s *terraform.State) error {
172350
return func(s *terraform.State) error {
173351
for name, rs := range s.RootModule().Resources {
@@ -369,3 +547,29 @@ resource "google_bigquery_data_transfer_config" "copy_config" {
369547
}
370548
`, random_suffix, random_suffix, random_suffix)
371549
}
550+
551+
func testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, path, table string) string {
552+
return fmt.Sprintf(`
553+
resource "google_bigquery_dataset" "dataset" {
554+
dataset_id = "tf_test_%s"
555+
friendly_name = "foo"
556+
description = "bar"
557+
location = "US"
558+
}
559+
560+
resource "google_bigquery_data_transfer_config" "update_config" {
561+
display_name = "tf-test-%s"
562+
data_source_id = "google_cloud_storage"
563+
destination_dataset_id = google_bigquery_dataset.dataset.dataset_id
564+
location = google_bigquery_dataset.dataset.location
565+
566+
params = {
567+
data_path_template = "gs://bq-bucket-%s-%s/*.json"
568+
destination_table_name_template = "the-table-%s-%s"
569+
file_format = "JSON"
570+
max_bad_records = 0
571+
write_disposition = "APPEND"
572+
}
573+
}
574+
`, random_suffix, random_suffix, random_suffix, path, random_suffix, table)
575+
}

0 commit comments

Comments
 (0)