Recreate the config when an immutable parameter changes and S3 is set in DataTransferConfig (#8839) (#15723)
* add logic to recreate the config when data_source_id is amazon_s3
* update the BigQuery DTS acceptance test

Signed-off-by: Modular Magician <[email protected]>
1 parent 6a09249 commit c3acecd

3 files changed (+162 −6)

.changelog/8839.txt (+3)

@@ -0,0 +1,3 @@
+```release-note:bug
+google_bigquery_data_transfer: fixed an error from updating immutable parameters of `google_bigquery_data_transfer_config`
+```

google/services/bigquerydatatransfer/resource_bigquery_data_transfer_config.go (+20 −5)

@@ -44,16 +44,17 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v
     return nil
 }
 
-// This customizeDiff is to use ForceNew for params fields data_path_template and
-// destination_table_name_template only if the value of "data_source_id" is "google_cloud_storage".
+// This customizeDiff is to use ForceNew for params fields data_path_template or data_path and
+// destination_table_name_template only if the value of "data_source_id" is "google_cloud_storage" or "amazon_s3".
 func ParamsCustomizeDiffFunc(diff tpgresource.TerraformResourceDiff) error {
     old, new := diff.GetChange("params")
     dsId := diff.Get("data_source_id").(string)
     oldParams := old.(map[string]interface{})
     newParams := new.(map[string]interface{})
     var err error
 
-    if dsId == "google_cloud_storage" {
+    switch dsId {
+    case "google_cloud_storage":
         if oldParams["data_path_template"] != nil && newParams["data_path_template"] != nil && oldParams["data_path_template"].(string) != newParams["data_path_template"].(string) {
             err = diff.ForceNew("params")
             if err != nil {
@@ -69,11 +70,25 @@ func ParamsCustomizeDiffFunc(diff tpgresource.TerraformResourceDiff) error {
             }
             return nil
         }
-    }
+    case "amazon_s3":
+        if oldParams["data_path"] != nil && newParams["data_path"] != nil && oldParams["data_path"].(string) != newParams["data_path"].(string) {
+            err = diff.ForceNew("params")
+            if err != nil {
+                return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
+            }
+            return nil
+        }
 
+        if oldParams["destination_table_name_template"] != nil && newParams["destination_table_name_template"] != nil && oldParams["destination_table_name_template"].(string) != newParams["destination_table_name_template"].(string) {
+            err = diff.ForceNew("params")
+            if err != nil {
+                return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
+            }
+            return nil
+        }
+    }
     return nil
 }
-
 func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error {
     return ParamsCustomizeDiffFunc(diff)
 }
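The wrapper paramsCustomizeDiff at the end of the hunk has the plugin-SDK signature, which is what lets it be registered on the resource. Below is a minimal sketch of that wiring, assuming the Terraform Plugin SDK v2 (customdiff.All and the CustomizeDiff field are real SDK constructs, while resourceExample, exampleCustomizeDiff, and the two-field schema are hypothetical stand-ins for the provider's full resource definition):

```go
// Illustrative only: where a CustomizeDiff wrapper like paramsCustomizeDiff
// plugs into a schema.Resource. Not the provider's actual resource definition.
package example

import (
	"context"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// exampleCustomizeDiff stands in for paramsCustomizeDiff: returning an error
// aborts the plan, while diff.ForceNew("params") converts an in-place update
// into a destroy-and-create.
func exampleCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, _ interface{}) error {
	return nil
}

func resourceExample() *schema.Resource {
	return &schema.Resource{
		// customdiff.All chains several CustomizeDiff functions; each one
		// runs at plan time, after Terraform computes the raw diff.
		CustomizeDiff: customdiff.All(exampleCustomizeDiff),
		Schema: map[string]*schema.Schema{
			"data_source_id": {Type: schema.TypeString, Required: true, ForceNew: true},
			"params":         {Type: schema.TypeMap, Required: true, Elem: &schema.Schema{Type: schema.TypeString}},
		},
	}
}
```

A successful diff.ForceNew("params") call during plan is what converts the previously failing in-place update of immutable S3 parameters into the recreation this commit is after.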

google/services/bigquerydatatransfer/resource_bigquery_data_transfer_config_test.go (+139 −1)

@@ -16,7 +16,7 @@ import (
     transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
 )
 
-func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew(t *testing.T) {
+func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNewWhenGoogleCloudStorage(t *testing.T) {
     t.Parallel()
 
     cases := map[string]struct {
@@ -154,6 +154,144 @@ func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForce
     }
 }
 
+func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNewWhenAmazonS3(t *testing.T) {
+    t.Parallel()
+
+    cases := map[string]struct {
+        before   map[string]interface{}
+        after    map[string]interface{}
+        forcenew bool
+    }{
+        "changing_data_path": {
+            before: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp/*.json",
+                    "destination_table_name_template": "table-old",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 10,
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            after: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp-new/*.json",
+                    "destination_table_name_template": "table-old",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 10,
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            forcenew: true,
+        },
+        "changing_destination_table_name_template": {
+            before: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp/*.json",
+                    "destination_table_name_template": "table-old",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 10,
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            after: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp/*.json",
+                    "destination_table_name_template": "table-new",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 10,
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            forcenew: true,
+        },
+        "changing_non_force_new_fields": {
+            before: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp/*.json",
+                    "destination_table_name_template": "table-old",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 10,
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            after: map[string]interface{}{
+                "data_source_id": "amazon_s3",
+                "params": map[string]interface{}{
+                    "data_path":                       "s3://s3-bucket-temp/*.json",
+                    "destination_table_name_template": "table-old",
+                    "file_format":                     "JSON",
+                    "max_bad_records":                 1000,
+                    "write_disposition":               "APPEND",
+                },
+            },
+            forcenew: false,
+        },
+        "changing_destination_table_name_template_for_different_data_source_id": {
+            before: map[string]interface{}{
+                "data_source_id": "scheduled_query",
+                "params": map[string]interface{}{
+                    "destination_table_name_template": "table-old",
+                    "query":                           "SELECT 1 AS a",
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            after: map[string]interface{}{
+                "data_source_id": "scheduled_query",
+                "params": map[string]interface{}{
+                    "destination_table_name_template": "table-new",
+                    "query":                           "SELECT 1 AS a",
+                    "write_disposition":               "WRITE_APPEND",
+                },
+            },
+            forcenew: false,
+        },
+        "changing_data_path_template_for_different_data_source_id": {
+            before: map[string]interface{}{
+                "data_source_id": "scheduled_query",
+                "params": map[string]interface{}{
+                    "data_path":         "s3://s3-bucket-temp/*.json",
+                    "query":             "SELECT 1 AS a",
+                    "write_disposition": "WRITE_APPEND",
+                },
+            },
+            after: map[string]interface{}{
+                "data_source_id": "scheduled_query",
+                "params": map[string]interface{}{
+                    "data_path":         "s3://s3-bucket-temp-new/*.json",
+                    "query":             "SELECT 1 AS a",
+                    "write_disposition": "WRITE_APPEND",
+                },
+            },
+            forcenew: false,
+        },
+    }
+
+    for tn, tc := range cases {
+        d := &tpgresource.ResourceDiffMock{
+            Before: map[string]interface{}{
+                "params":         tc.before["params"],
+                "data_source_id": tc.before["data_source_id"],
+            },
+            After: map[string]interface{}{
+                "params":         tc.after["params"],
+                "data_source_id": tc.after["data_source_id"],
+            },
+        }
+        err := bigquerydatatransfer.ParamsCustomizeDiffFunc(d)
+        if err != nil {
+            t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn)
+        }
+        if d.IsForceNew != tc.forcenew {
+            t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew)
+        }
+    }
+}
+
 // The service account TF uses needs the permission granted in the configs
 // but it will get deleted by parallel tests, so they need to be run serially.
 func TestAccBigqueryDataTransferConfig(t *testing.T) {

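Note that both table-driven tests above call ParamsCustomizeDiffFunc directly through tpgresource.ResourceDiffMock, so they run as plain unit tests with no cloud resources involved; an invocation along the lines of go test ./google/services/bigquerydatatransfer -run ParamsCustomDiffFunc -v from the repository root (the -run pattern is illustrative) should match both. The acceptance test whose signature closes the hunk, by contrast, provisions real transfer configs and is kept serial for the permission reason given in its comment.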