@@ -68,6 +68,46 @@ func TestAccDataflowJobCreateWithServiceAccount(t *testing.T) {
68
68
})
69
69
}
70
70
71
+ func TestAccDataflowJobCreateWithNetwork (t * testing.T ) {
72
+ t .Parallel ()
73
+ resource .Test (t , resource.TestCase {
74
+ PreCheck : func () { testAccPreCheck (t ) },
75
+ Providers : testAccProviders ,
76
+ CheckDestroy : testAccCheckDataflowJobDestroy ,
77
+ Steps : []resource.TestStep {
78
+ {
79
+ Config : testAccDataflowJobWithNetwork ,
80
+ Check : resource .ComposeTestCheckFunc (
81
+ testAccDataflowJobExists (
82
+ "google_dataflow_job.big_data" ),
83
+ testAccDataflowJobHasNetwork (
84
+ "google_dataflow_job.big_data" ),
85
+ ),
86
+ },
87
+ },
88
+ })
89
+ }
90
+
91
+ func TestAccDataflowJobCreateWithSubnetwork (t * testing.T ) {
92
+ t .Parallel ()
93
+ resource .Test (t , resource.TestCase {
94
+ PreCheck : func () { testAccPreCheck (t ) },
95
+ Providers : testAccProviders ,
96
+ CheckDestroy : testAccCheckDataflowJobDestroy ,
97
+ Steps : []resource.TestStep {
98
+ {
99
+ Config : testAccDataflowJobWithSubnetwork ,
100
+ Check : resource .ComposeTestCheckFunc (
101
+ testAccDataflowJobExists (
102
+ "google_dataflow_job.big_data" ),
103
+ testAccDataflowJobHasSubnetwork (
104
+ "google_dataflow_job.big_data" ),
105
+ ),
106
+ },
107
+ },
108
+ })
109
+ }
110
+
71
111
func testAccCheckDataflowJobDestroy (s * terraform.State ) error {
72
112
for _ , rs := range s .RootModule ().Resources {
73
113
if rs .Type != "google_dataflow_job" {
@@ -128,6 +168,70 @@ func testAccDataflowJobExists(n string) resource.TestCheckFunc {
128
168
}
129
169
}
130
170
171
+ func testAccDataflowJobHasNetwork (n string ) resource.TestCheckFunc {
172
+ return func (s * terraform.State ) error {
173
+ rs , ok := s .RootModule ().Resources [n ]
174
+ if ! ok {
175
+ return fmt .Errorf ("Not found: %s" , n )
176
+ }
177
+ if rs .Primary .ID == "" {
178
+ return fmt .Errorf ("No ID is set" )
179
+ }
180
+
181
+ config := testAccProvider .Meta ().(* Config )
182
+
183
+ job , err := config .clientDataflow .Projects .Jobs .Get (config .Project , rs .Primary .ID ).Do ()
184
+ if err != nil {
185
+ return fmt .Errorf ("Job does not exist" )
186
+ }
187
+ resource .Retry (1 * time .Minute , func () * resource.RetryError {
188
+ pools := job .Environment .WorkerPools
189
+ if len (pools ) < 1 {
190
+ return resource .RetryableError (fmt .Errorf ("no worker pools for job" ))
191
+ }
192
+ network := pools [0 ].Network
193
+ if network != rs .Primary .Attributes ["network" ] {
194
+ return resource .RetryableError (fmt .Errorf ("Network mismatch: %s != %s" , network , rs .Primary .Attributes ["network" ]))
195
+ }
196
+ return nil
197
+ })
198
+
199
+ return nil
200
+ }
201
+ }
202
+
203
+ func testAccDataflowJobHasSubnetwork (n string ) resource.TestCheckFunc {
204
+ return func (s * terraform.State ) error {
205
+ rs , ok := s .RootModule ().Resources [n ]
206
+ if ! ok {
207
+ return fmt .Errorf ("Not found: %s" , n )
208
+ }
209
+ if rs .Primary .ID == "" {
210
+ return fmt .Errorf ("No ID is set" )
211
+ }
212
+
213
+ config := testAccProvider .Meta ().(* Config )
214
+
215
+ job , err := config .clientDataflow .Projects .Jobs .Get (config .Project , rs .Primary .ID ).Do ()
216
+ if err != nil {
217
+ return fmt .Errorf ("Job does not exist" )
218
+ }
219
+ resource .Retry (1 * time .Minute , func () * resource.RetryError {
220
+ pools := job .Environment .WorkerPools
221
+ if len (pools ) < 1 {
222
+ return resource .RetryableError (fmt .Errorf ("no worker pools for job" ))
223
+ }
224
+ subnetwork := pools [0 ].Subnetwork
225
+ if subnetwork != rs .Primary .Attributes ["subnetwork" ] {
226
+ return resource .RetryableError (fmt .Errorf ("Subnetwork mismatch: %s != %s" , subnetwork , rs .Primary .Attributes ["subnetwork" ]))
227
+ }
228
+ return nil
229
+ })
230
+
231
+ return nil
232
+ }
233
+ }
234
+
131
235
func testAccDataflowJobHasServiceAccount (n string ) resource.TestCheckFunc {
132
236
return func (s * terraform.State ) error {
133
237
rs , ok := s .RootModule ().Resources [n ]
@@ -247,6 +351,70 @@ resource "google_dataflow_job" "big_data" {
247
351
on_delete = "cancel"
248
352
}` , acctest .RandString (10 ), acctest .RandString (10 ), getTestProjectFromEnv ())
249
353
354
// testAccDataflowJobWithNetwork is the Terraform config for
// TestAccDataflowJobCreateWithNetwork: a GCS temp bucket, a network with
// auto-created subnetworks, and a Dataflow wordcount job pinned to that
// network via its `network` attribute. The %s verbs are filled, in order,
// with random suffixes for the bucket, network, and job names, and finally
// the test project ID from the environment.
var testAccDataflowJobWithNetwork = fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
  name = "dfjob-test-%s-temp"

  force_destroy = true
}

resource "google_compute_network" "net" {
  name                    = "dfjob-test-%s-net"
  auto_create_subnetworks = true
}

resource "google_dataflow_job" "big_data" {
  name = "dfjob-test-%s"

  template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
  temp_gcs_location = "${google_storage_bucket.temp.url}"
  network           = "${google_compute_network.net.name}"

  parameters = {
    inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
    output    = "${google_storage_bucket.temp.url}/output"
  }
  zone    = "us-central1-f"
  project = "%s"

  on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
382
+
383
// testAccDataflowJobWithSubnetwork is the Terraform config for
// TestAccDataflowJobCreateWithSubnetwork: a GCS temp bucket, a network with
// auto_create_subnetworks disabled, an explicit subnetwork on that network,
// and a Dataflow wordcount job pinned to the subnetwork via its `subnetwork`
// attribute. The %s verbs are filled, in order, with random suffixes for the
// bucket, network, subnetwork, and job names, and finally the test project ID
// from the environment.
var testAccDataflowJobWithSubnetwork = fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
  name = "dfjob-test-%s-temp"

  force_destroy = true
}

resource "google_compute_network" "net" {
  name                    = "dfjob-test-%s-net"
  auto_create_subnetworks = false
}

resource "google_compute_subnetwork" "subnet" {
  name          = "dfjob-test-%s-subnet"
  ip_cidr_range = "10.2.0.0/16"
  network       = "${google_compute_network.net.self_link}"
}

resource "google_dataflow_job" "big_data" {
  name = "dfjob-test-%s"

  template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
  temp_gcs_location = "${google_storage_bucket.temp.url}"
  subnetwork        = "${google_compute_subnetwork.subnet.self_link}"

  parameters = {
    inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
    output    = "${google_storage_bucket.temp.url}/output"
  }
  zone    = "us-central1-f"
  project = "%s"

  on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
417
+
250
418
var testAccDataflowJobWithServiceAccount = fmt .Sprintf (`
251
419
resource "google_storage_bucket" "temp" {
252
420
name = "dfjob-test-%s-temp"
0 commit comments