@@ -16,7 +16,7 @@ import (
 	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
 )
 
-func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew(t *testing.T) {
+func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNewWhenGoogleCloudStorage(t *testing.T) {
 	t.Parallel()
 
 	cases := map[string]struct {
@@ -154,6 +154,144 @@ func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew
 	}
 }
 
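+// TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNewWhenAmazonS3
+// verifies that the params CustomizeDiff flags ForceNew only when a force-new
+// Amazon S3 param (data_path or destination_table_name_template) changes, and
+// not for in-place param updates or for other data sources.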
+func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNewWhenAmazonS3(t *testing.T) {
+	t.Parallel()
+
+	cases := map[string]struct {
+		before   map[string]interface{}
+		after    map[string]interface{}
+		forcenew bool
+	}{
+		"changing_data_path": {
+			before: map[string]interface{}{
+				"data_source_id": "amazon_s3",
+				"params": map[string]interface{}{
+					"data_path":                       "s3://s3-bucket-temp/*.json",
+					"destination_table_name_template": "table-old",
+					"file_format":                     "JSON",
+					"max_bad_records":                 10,
+					"write_disposition":               "WRITE_APPEND",
+				},
+			},
+			after: map[string]interface{}{
+				"data_source_id": "amazon_s3",
+				"params": map[string]interface{}{
+					"data_path":                       "s3://s3-bucket-temp-new/*.json",
+					"destination_table_name_template": "table-old",
+					"file_format":                     "JSON",
+					"max_bad_records":                 10,
+					"write_disposition":               "WRITE_APPEND",
+				},
+			},
+			forcenew: true,
+		},
+		"changing_destination_table_name_template": {
+			before: map[string]interface{}{
+				"data_source_id": "amazon_s3",
+				"params": map[string]interface{}{
+					"data_path":                       "s3://s3-bucket-temp/*.json",
+					"destination_table_name_template": "table-old",
+					"file_format":                     "JSON",
+					"max_bad_records":                 10,
+					"write_disposition":               "WRITE_APPEND",
+				},
+			},
+			after: map[string]interface{}{
+				"data_source_id": "amazon_s3",
+				"params": map[string]interface{}{
+					"data_path":                       "s3://s3-bucket-temp/*.json",
+					"destination_table_name_template": "table-new",
+					"file_format":                     "JSON",
+					"max_bad_records":                 10,
+					"write_disposition":               "WRITE_APPEND",
+				},
+			},
+			forcenew: true,
+		},
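+		// Changing only max_bad_records and write_disposition should update in place.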
+ "changing_non_force_new_fields" : {
212
+ before : map [string ]interface {}{
213
+ "data_source_id" : "amazon_s3" ,
214
+ "params" : map [string ]interface {}{
215
+ "data_path" : "s3://s3-bucket-temp/*.json" ,
216
+ "destination_table_name_template" : "table-old" ,
217
+ "file_format" : "JSON" ,
218
+ "max_bad_records" : 10 ,
219
+ "write_disposition" : "WRITE_APPEND" ,
220
+ },
221
+ },
222
+ after : map [string ]interface {}{
223
+ "data_source_id" : "amazon_s3" ,
224
+ "params" : map [string ]interface {}{
225
+ "data_path" : "s3://s3-bucket-temp/*.json" ,
226
+ "destination_table_name_template" : "table-old" ,
227
+ "file_format" : "JSON" ,
228
+ "max_bad_records" : 1000 ,
229
+ "write_disposition" : "APPEND" ,
230
+ },
231
+ },
232
+ forcenew : false ,
233
+ },
234
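+		// The same param keys on a non-amazon_s3 data source must not force recreation.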
+ "changing_destination_table_name_template_for_different_data_source_id" : {
235
+ before : map [string ]interface {}{
236
+ "data_source_id" : "scheduled_query" ,
237
+ "params" : map [string ]interface {}{
238
+ "destination_table_name_template" : "table-old" ,
239
+ "query" : "SELECT 1 AS a" ,
240
+ "write_disposition" : "WRITE_APPEND" ,
241
+ },
242
+ },
243
+ after : map [string ]interface {}{
244
+ "data_source_id" : "scheduled_query" ,
245
+ "params" : map [string ]interface {}{
246
+ "destination_table_name_template" : "table-new" ,
247
+ "query" : "SELECT 1 AS a" ,
248
+ "write_disposition" : "WRITE_APPEND" ,
249
+ },
250
+ },
251
+ forcenew : false ,
252
+ },
253
+ "changing_data_path_template_for_different_data_source_id" : {
254
+ before : map [string ]interface {}{
255
+ "data_source_id" : "scheduled_query" ,
256
+ "params" : map [string ]interface {}{
257
+ "data_path" : "s3://s3-bucket-temp/*.json" ,
258
+ "query" : "SELECT 1 AS a" ,
259
+ "write_disposition" : "WRITE_APPEND" ,
260
+ },
261
+ },
262
+ after : map [string ]interface {}{
263
+ "data_source_id" : "scheduled_query" ,
264
+ "params" : map [string ]interface {}{
265
+ "data_path" : "s3://s3-bucket-temp-new/*.json" ,
266
+ "query" : "SELECT 1 AS a" ,
267
+ "write_disposition" : "WRITE_APPEND" ,
268
+ },
269
+ },
270
+ forcenew : false ,
271
+ },
272
+ }
273
+
274
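+	// Feed each case through the CustomizeDiff func via a mocked ResourceDiff
+	// and assert whether ForceNew was flagged.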
+	for tn, tc := range cases {
+		d := &tpgresource.ResourceDiffMock{
+			Before: map[string]interface{}{
+				"params":         tc.before["params"],
+				"data_source_id": tc.before["data_source_id"],
+			},
+			After: map[string]interface{}{
+				"params":         tc.after["params"],
+				"data_source_id": tc.after["data_source_id"],
+			},
+		}
+		err := bigquerydatatransfer.ParamsCustomizeDiffFunc(d)
+		if err != nil {
+			t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn)
+		}
+		if d.IsForceNew != tc.forcenew {
+			t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew)
+		}
+	}
+}
+
 // The service account TF uses needs the permission granted in the configs
 // but it will get deleted by parallel tests, so they need to be run serially.
 func TestAccBigqueryDataTransferConfig(t *testing.T) {