Skip to content

Commit 5fe4b9b

Browse files
Ty Larrabee, modular-magician
Ty Larrabee
authored and committed
Add (sub)network to dataflow job
Signed-off-by: Modular Magician <[email protected]>
1 parent 7c37b81 commit 5fe4b9b

File tree

2 files changed

+184
-2
lines changed

2 files changed

+184
-2
lines changed

google/resource_dataflow_job.go

+16-2
Original file line number | Diff line number | Diff line change
@@ -96,6 +96,18 @@ func resourceDataflowJob() *schema.Resource {
9696
Optional: true,
9797
ForceNew: true,
9898
},
99+
100+
"network": {
101+
Type: schema.TypeString,
102+
Optional: true,
103+
ForceNew: true,
104+
},
105+
106+
"subnetwork": {
107+
Type: schema.TypeString,
108+
Optional: true,
109+
ForceNew: true,
110+
},
99111
},
100112
}
101113
}
@@ -121,10 +133,12 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error {
121133
params := expandStringMap(d, "parameters")
122134

123135
env := dataflow.RuntimeEnvironment{
124-
TempLocation: d.Get("temp_gcs_location").(string),
125-
Zone: zone,
126136
MaxWorkers: int64(d.Get("max_workers").(int)),
137+
Network: d.Get("network").(string),
127138
ServiceAccountEmail: d.Get("service_account_email").(string),
139+
Subnetwork: d.Get("subnetwork").(string),
140+
TempLocation: d.Get("temp_gcs_location").(string),
141+
Zone: zone,
128142
}
129143

130144
request := dataflow.CreateJobFromTemplateRequest{

google/resource_dataflow_job_test.go

+168
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,46 @@ func TestAccDataflowJobCreateWithServiceAccount(t *testing.T) {
6868
})
6969
}
7070

71+
func TestAccDataflowJobCreateWithNetwork(t *testing.T) {
72+
t.Parallel()
73+
resource.Test(t, resource.TestCase{
74+
PreCheck: func() { testAccPreCheck(t) },
75+
Providers: testAccProviders,
76+
CheckDestroy: testAccCheckDataflowJobDestroy,
77+
Steps: []resource.TestStep{
78+
{
79+
Config: testAccDataflowJobWithNetwork,
80+
Check: resource.ComposeTestCheckFunc(
81+
testAccDataflowJobExists(
82+
"google_dataflow_job.big_data"),
83+
testAccDataflowJobHasNetwork(
84+
"google_dataflow_job.big_data"),
85+
),
86+
},
87+
},
88+
})
89+
}
90+
91+
func TestAccDataflowJobCreateWithSubnetwork(t *testing.T) {
92+
t.Parallel()
93+
resource.Test(t, resource.TestCase{
94+
PreCheck: func() { testAccPreCheck(t) },
95+
Providers: testAccProviders,
96+
CheckDestroy: testAccCheckDataflowJobDestroy,
97+
Steps: []resource.TestStep{
98+
{
99+
Config: testAccDataflowJobWithSubnetwork,
100+
Check: resource.ComposeTestCheckFunc(
101+
testAccDataflowJobExists(
102+
"google_dataflow_job.big_data"),
103+
testAccDataflowJobHasSubnetwork(
104+
"google_dataflow_job.big_data"),
105+
),
106+
},
107+
},
108+
})
109+
}
110+
71111
func testAccCheckDataflowJobDestroy(s *terraform.State) error {
72112
for _, rs := range s.RootModule().Resources {
73113
if rs.Type != "google_dataflow_job" {
@@ -128,6 +168,70 @@ func testAccDataflowJobExists(n string) resource.TestCheckFunc {
128168
}
129169
}
130170

171+
func testAccDataflowJobHasNetwork(n string) resource.TestCheckFunc {
172+
return func(s *terraform.State) error {
173+
rs, ok := s.RootModule().Resources[n]
174+
if !ok {
175+
return fmt.Errorf("Not found: %s", n)
176+
}
177+
if rs.Primary.ID == "" {
178+
return fmt.Errorf("No ID is set")
179+
}
180+
181+
config := testAccProvider.Meta().(*Config)
182+
183+
job, err := config.clientDataflow.Projects.Jobs.Get(config.Project, rs.Primary.ID).Do()
184+
if err != nil {
185+
return fmt.Errorf("Job does not exist")
186+
}
187+
resource.Retry(1*time.Minute, func() *resource.RetryError {
188+
pools := job.Environment.WorkerPools
189+
if len(pools) < 1 {
190+
return resource.RetryableError(fmt.Errorf("no worker pools for job"))
191+
}
192+
network := pools[0].Network
193+
if network != rs.Primary.Attributes["network"] {
194+
return resource.RetryableError(fmt.Errorf("Network mismatch: %s != %s", network, rs.Primary.Attributes["network"]))
195+
}
196+
return nil
197+
})
198+
199+
return nil
200+
}
201+
}
202+
203+
func testAccDataflowJobHasSubnetwork(n string) resource.TestCheckFunc {
204+
return func(s *terraform.State) error {
205+
rs, ok := s.RootModule().Resources[n]
206+
if !ok {
207+
return fmt.Errorf("Not found: %s", n)
208+
}
209+
if rs.Primary.ID == "" {
210+
return fmt.Errorf("No ID is set")
211+
}
212+
213+
config := testAccProvider.Meta().(*Config)
214+
215+
job, err := config.clientDataflow.Projects.Jobs.Get(config.Project, rs.Primary.ID).Do()
216+
if err != nil {
217+
return fmt.Errorf("Job does not exist")
218+
}
219+
resource.Retry(1*time.Minute, func() *resource.RetryError {
220+
pools := job.Environment.WorkerPools
221+
if len(pools) < 1 {
222+
return resource.RetryableError(fmt.Errorf("no worker pools for job"))
223+
}
224+
subnetwork := pools[0].Subnetwork
225+
if subnetwork != rs.Primary.Attributes["subnetwork"] {
226+
return resource.RetryableError(fmt.Errorf("Subnetwork mismatch: %s != %s", subnetwork, rs.Primary.Attributes["subnetwork"]))
227+
}
228+
return nil
229+
})
230+
231+
return nil
232+
}
233+
}
234+
131235
func testAccDataflowJobHasServiceAccount(n string) resource.TestCheckFunc {
132236
return func(s *terraform.State) error {
133237
rs, ok := s.RootModule().Resources[n]
@@ -247,6 +351,70 @@ resource "google_dataflow_job" "big_data" {
247351
on_delete = "cancel"
248352
}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
249353

354+
// testAccDataflowJobWithNetwork is a test configuration that provisions a
// temp GCS bucket, an auto-subnetwork network, and a Dataflow job whose
// "network" attribute references that network by name. Resource names carry
// random suffixes so parallel test runs do not collide.
var testAccDataflowJobWithNetwork = fmt.Sprintf(`
355+
resource "google_storage_bucket" "temp" {
356+
name = "dfjob-test-%s-temp"
357+
358+
force_destroy = true
359+
}
360+
361+
resource "google_compute_network" "net" {
362+
name = "dfjob-test-%s-net"
363+
auto_create_subnetworks = true
364+
}
365+
366+
resource "google_dataflow_job" "big_data" {
367+
name = "dfjob-test-%s"
368+
369+
template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
370+
temp_gcs_location = "${google_storage_bucket.temp.url}"
371+
network = "${google_compute_network.net.name}"
372+
373+
parameters = {
374+
inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
375+
output = "${google_storage_bucket.temp.url}/output"
376+
}
377+
zone = "us-central1-f"
378+
project = "%s"
379+
380+
on_delete = "cancel"
381+
}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
382+
383+
// testAccDataflowJobWithSubnetwork is a test configuration that provisions a
// temp GCS bucket, a custom-mode network with an explicit subnetwork, and a
// Dataflow job whose "subnetwork" attribute references that subnetwork by
// self_link. Resource names carry random suffixes so parallel test runs do
// not collide.
var testAccDataflowJobWithSubnetwork = fmt.Sprintf(`
384+
resource "google_storage_bucket" "temp" {
385+
name = "dfjob-test-%s-temp"
386+
387+
force_destroy = true
388+
}
389+
390+
resource "google_compute_network" "net" {
391+
name = "dfjob-test-%s-net"
392+
auto_create_subnetworks = false
393+
}
394+
395+
resource "google_compute_subnetwork" "subnet" {
396+
name = "dfjob-test-%s-subnet"
397+
ip_cidr_range = "10.2.0.0/16"
398+
network = "${google_compute_network.net.self_link}"
399+
}
400+
401+
resource "google_dataflow_job" "big_data" {
402+
name = "dfjob-test-%s"
403+
404+
template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
405+
temp_gcs_location = "${google_storage_bucket.temp.url}"
406+
subnetwork = "${google_compute_subnetwork.subnet.self_link}"
407+
408+
parameters = {
409+
inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
410+
output = "${google_storage_bucket.temp.url}/output"
411+
}
412+
zone = "us-central1-f"
413+
project = "%s"
414+
415+
on_delete = "cancel"
416+
}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
417+
250418
var testAccDataflowJobWithServiceAccount = fmt.Sprintf(`
251419
resource "google_storage_bucket" "temp" {
252420
name = "dfjob-test-%s-temp"

0 commit comments

Comments (0)