Commit 281077c

Allow Private Worker Pool for Cloud Functions Cloud Builds. (#6510) (#12591)
* Allow custom worker pools for Cloud Functions build
* Fix test for cloud functions cloud build worker pool
* Add build_worker_pool to CRU operations

Signed-off-by: Modular Magician <[email protected]>
1 parent 924b5a7

3 files changed (+99, -0)

.changelog/6510.txt (+3)

@@ -0,0 +1,3 @@
+```release-note:enhancement
+cloudfunctions: Added `build_worker_pool` to `google_cloudfunctions_function`
+```
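For context, here is a minimal sketch of how the new attribute is used in Terraform configuration. It is based on the test configuration in the diff below; the resource names are illustrative, and the storage bucket/object holding the function source are assumed to exist elsewhere in the configuration.

```hcl
# Private worker pool that Cloud Build should use (illustrative name).
resource "google_cloudbuild_worker_pool" "pool" {
  name     = "my-pool"
  location = "us-central1"
  worker_config {
    disk_size_gb   = 100
    machine_type   = "e2-standard-4"
    no_external_ip = false
  }
}

resource "google_cloudfunctions_function" "function" {
  name                  = "my-function" # hypothetical name
  runtime               = "nodejs10"
  entry_point           = "helloGET"
  trigger_http          = true
  available_memory_mb   = 128
  source_archive_bucket = google_storage_bucket.bucket.name
  source_archive_object = google_storage_bucket_object.archive.name

  # New attribute from this commit: build the function in the private worker
  # pool, referenced by its full resource ID
  # (projects/PROJECT/locations/LOCATION/workerPools/POOL).
  build_worker_pool = google_cloudbuild_worker_pool.pool.id
}
```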

google/resource_cloudfunctions_function.go (+18)

@@ -112,6 +112,12 @@ func resourceCloudFunctionsFunction() *schema.Resource {
 				ValidateFunc: validateResourceCloudFunctionsFunctionName,
 			},
 
+			"build_worker_pool": {
+				Type: schema.TypeString,
+				Optional: true,
+				Description: `Name of the Cloud Build Custom Worker Pool that should be used to build the function.`,
+			},
+
 			"source_archive_bucket": {
 				Type: schema.TypeString,
 				Optional: true,
@@ -483,6 +489,10 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) error
 		function.Description = v.(string)
 	}
 
+	if v, ok := d.GetOk("build_worker_pool"); ok {
+		function.BuildWorkerPool = v.(string)
+	}
+
 	if v, ok := d.GetOk("entry_point"); ok {
 		function.EntryPoint = v.(string)
 	}
@@ -593,6 +603,9 @@ func resourceCloudFunctionsRead(d *schema.ResourceData, meta interface{}) error
 	if err := d.Set("description", function.Description); err != nil {
 		return fmt.Errorf("Error setting description: %s", err)
 	}
+	if err := d.Set("build_worker_pool", function.BuildWorkerPool); err != nil {
+		return fmt.Errorf("Error setting build_worker_pool: %s", err)
+	}
 	if err := d.Set("entry_point", function.EntryPoint); err != nil {
 		return fmt.Errorf("Error setting entry_point: %s", err)
 	}
@@ -760,6 +773,11 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) error
 		updateMaskArr = append(updateMaskArr, "description")
 	}
 
+	if d.HasChange("build_worker_pool") {
+		function.BuildWorkerPool = d.Get("build_worker_pool").(string)
+		updateMaskArr = append(updateMaskArr, "build_worker_pool")
+	}
+
 	if d.HasChange("timeout") {
 		function.Timeout = fmt.Sprintf("%vs", d.Get("timeout").(int))
 		updateMaskArr = append(updateMaskArr, "timeout")

google/resource_cloudfunctions_function_test.go (+78)

@@ -245,6 +245,46 @@ func TestAccCloudFunctionsFunction_update(t *testing.T) {
 	})
 }
 
+func TestAccCloudFunctionsFunction_buildworkerpool(t *testing.T) {
+	t.Parallel()
+
+	var function cloudfunctions.CloudFunction
+
+	funcResourceName := "google_cloudfunctions_function.function"
+	functionName := fmt.Sprintf("tf-test-%s", randString(t, 10))
+	bucketName := fmt.Sprintf("tf-test-bucket-%d", randInt(t))
+	location := "us-central1"
+	zipFilePath := createZIPArchiveForCloudFunctionSource(t, testHTTPTriggerPath)
+	proj := getTestProjectFromEnv()
+
+	defer os.Remove(zipFilePath) // clean up
+
+	vcrTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckCloudFunctionsFunctionDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccCloudFunctionsFunction_buildworkerpool(functionName, bucketName, zipFilePath, location),
+				Check: resource.ComposeTestCheckFunc(
+					testAccCloudFunctionsFunctionExists(
+						t, funcResourceName, &function),
+					resource.TestCheckResourceAttr(funcResourceName,
+						"name", functionName),
+					resource.TestCheckResourceAttr(funcResourceName,
+						"build_worker_pool", fmt.Sprintf("projects/%s/locations/%s/workerPools/pool-%s", proj, location, functionName)),
+				),
+			},
+			{
+				ResourceName:            funcResourceName,
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"build_environment_variables"},
+			},
+		},
+	})
+}
+
 func TestAccCloudFunctionsFunction_pubsub(t *testing.T) {
 	t.Parallel()
 
@@ -766,6 +806,44 @@ resource "google_cloudfunctions_function" "function" {
 `, bucketName, zipFilePath, functionName)
 }
 
+func testAccCloudFunctionsFunction_buildworkerpool(functionName string, bucketName string, zipFilePath string, location string) string {
+	return fmt.Sprintf(`
+resource "google_storage_bucket" "bucket" {
+  name     = "%s"
+  location = "US"
+}
+
+resource "google_storage_bucket_object" "archive" {
+  name   = "index.zip"
+  bucket = google_storage_bucket.bucket.name
+  source = "%s"
+}
+
+resource "google_cloudbuild_worker_pool" "pool" {
+  name     = "pool-%[3]s"
+  location = "%s"
+  worker_config {
+    disk_size_gb   = 100
+    machine_type   = "e2-standard-4"
+    no_external_ip = false
+  }
+}
+
+resource "google_cloudfunctions_function" "function" {
+  name                  = "%[3]s"
+  runtime               = "nodejs10"
+  description           = "test function"
+  docker_registry       = "CONTAINER_REGISTRY"
+  available_memory_mb   = 128
+  source_archive_bucket = google_storage_bucket.bucket.name
+  source_archive_object = google_storage_bucket_object.archive.name
+  trigger_http          = true
+  timeout               = 61
+  entry_point           = "helloGET"
+  build_worker_pool     = google_cloudbuild_worker_pool.pool.id
+}`, bucketName, zipFilePath, functionName, location)
+}
+
 func testAccCloudFunctionsFunction_pubsub(functionName string, bucketName string,
 	topic string, zipFilePath string) string {
 	return fmt.Sprintf(`
