Skip to content

Commit 51ff7d0

Browse files
added triggerId to jobTrigger resource (#8091) (#14892)
Signed-off-by: Modular Magician <[email protected]>
1 parent 7fab982 commit 51ff7d0

File tree

4 files changed

+162
-11
lines changed

4 files changed

+162
-11
lines changed

.changelog/8091.txt

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
dlp: added `trigger_id` field to `google_data_loss_prevention_job_trigger`
3+
```

google/resource_data_loss_prevention_job_trigger_generated_test.go

+74-10
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBasicExample(t *testing.T)
5050
ResourceName: "google_data_loss_prevention_job_trigger.basic",
5151
ImportState: true,
5252
ImportStateVerify: true,
53-
ImportStateVerifyIgnore: []string{"parent"},
53+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
5454
},
5555
},
5656
})
@@ -113,7 +113,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitExample(t
113113
ResourceName: "google_data_loss_prevention_job_trigger.bigquery_row_limit",
114114
ImportState: true,
115115
ImportStateVerify: true,
116-
ImportStateVerifyIgnore: []string{"parent"},
116+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
117117
},
118118
},
119119
})
@@ -181,7 +181,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitPercentage
181181
ResourceName: "google_data_loss_prevention_job_trigger.bigquery_row_limit_percentage",
182182
ImportState: true,
183183
ImportStateVerify: true,
184-
ImportStateVerifyIgnore: []string{"parent"},
184+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
185185
},
186186
},
187187
})
@@ -249,7 +249,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(t
249249
ResourceName: "google_data_loss_prevention_job_trigger.data_catalog_output",
250250
ImportState: true,
251251
ImportStateVerify: true,
252-
ImportStateVerifyIgnore: []string{"parent"},
252+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
253253
},
254254
},
255255
})
@@ -310,7 +310,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(t *testin
310310
ResourceName: "google_data_loss_prevention_job_trigger.scc_output",
311311
ImportState: true,
312312
ImportStateVerify: true,
313-
ImportStateVerifyIgnore: []string{"parent"},
313+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
314314
},
315315
},
316316
})
@@ -371,7 +371,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerJobNotificationEmailsExamp
371371
ResourceName: "google_data_loss_prevention_job_trigger.job_notification_emails",
372372
ImportState: true,
373373
ImportStateVerify: true,
374-
ImportStateVerifyIgnore: []string{"parent"},
374+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
375375
},
376376
},
377377
})
@@ -428,7 +428,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDeidentifyExample(t *testi
428428
ResourceName: "google_data_loss_prevention_job_trigger.deidentify",
429429
ImportState: true,
430430
ImportStateVerify: true,
431-
ImportStateVerifyIgnore: []string{"parent"},
431+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
432432
},
433433
},
434434
})
@@ -542,7 +542,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerHybridExample(t *testing.T
542542
ResourceName: "google_data_loss_prevention_job_trigger.hybrid_trigger",
543543
ImportState: true,
544544
ImportStateVerify: true,
545-
ImportStateVerifyIgnore: []string{"parent"},
545+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
546546
},
547547
},
548548
})
@@ -610,7 +610,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerInspectExample(t *testing.
610610
ResourceName: "google_data_loss_prevention_job_trigger.inspect",
611611
ImportState: true,
612612
ImportStateVerify: true,
613-
ImportStateVerifyIgnore: []string{"parent"},
613+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
614614
},
615615
},
616616
})
@@ -729,7 +729,7 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPublishToStackdriverExampl
729729
ResourceName: "google_data_loss_prevention_job_trigger.publish_to_stackdriver",
730730
ImportState: true,
731731
ImportStateVerify: true,
732-
ImportStateVerifyIgnore: []string{"parent"},
732+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
733733
},
734734
},
735735
})
@@ -765,6 +765,70 @@ resource "google_data_loss_prevention_job_trigger" "publish_to_stackdriver" {
765765
`, context)
766766
}
767767

768+
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(t *testing.T) {
769+
t.Parallel()
770+
771+
context := map[string]interface{}{
772+
"project": acctest.GetTestProjectFromEnv(),
773+
"random_suffix": RandString(t, 10),
774+
}
775+
776+
VcrTest(t, resource.TestCase{
777+
PreCheck: func() { acctest.AccTestPreCheck(t) },
778+
ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
779+
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
780+
Steps: []resource.TestStep{
781+
{
782+
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(context),
783+
},
784+
{
785+
ResourceName: "google_data_loss_prevention_job_trigger.with_trigger_id",
786+
ImportState: true,
787+
ImportStateVerify: true,
788+
ImportStateVerifyIgnore: []string{"trigger_id", "parent"},
789+
},
790+
},
791+
})
792+
}
793+
794+
func testAccDataLossPreventionJobTrigger_dlpJobTriggerWithIdExample(context map[string]interface{}) string {
795+
return tpgresource.Nprintf(`
796+
resource "google_data_loss_prevention_job_trigger" "with_trigger_id" {
797+
parent = "projects/%{project}"
798+
description = "Starting description"
799+
display_name = "display"
800+
trigger_id = "tf-test-id-%{random_suffix}"
801+
802+
triggers {
803+
schedule {
804+
recurrence_period_duration = "86400s"
805+
}
806+
}
807+
808+
inspect_job {
809+
inspect_template_name = "fake"
810+
actions {
811+
save_findings {
812+
output_config {
813+
table {
814+
project_id = "project"
815+
dataset_id = "dataset123"
816+
}
817+
}
818+
}
819+
}
820+
storage_config {
821+
cloud_storage_options {
822+
file_set {
823+
url = "gs://mybucket/directory/"
824+
}
825+
}
826+
}
827+
}
828+
}
829+
`, context)
830+
}
831+
768832
func testAccCheckDataLossPreventionJobTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error {
769833
return func(s *terraform.State) error {
770834
for name, rs := range s.RootModule().Resources {

google/services/datalossprevention/resource_data_loss_prevention_job_trigger.go

+41-1
Original file line numberDiff line numberDiff line change
@@ -1374,6 +1374,15 @@ at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built
13741374
Description: `Whether the trigger is currently active. Default value: "HEALTHY" Possible values: ["PAUSED", "HEALTHY", "CANCELLED"]`,
13751375
Default: "HEALTHY",
13761376
},
1377+
"trigger_id": {
1378+
Type: schema.TypeString,
1379+
Computed: true,
1380+
Optional: true,
1381+
ForceNew: true,
1382+
Description: `The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores;
1383+
that is, it must match the regular expression: [a-zA-Z\d-_]+.
1384+
The maximum length is 100 characters. Can be empty to allow the system to generate one.`,
1385+
},
13771386
"create_time": {
13781387
Type: schema.TypeString,
13791388
Computed: true,
@@ -1514,6 +1523,18 @@ func resourceDataLossPreventionJobTriggerRead(d *schema.ResourceData, meta inter
15141523
return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("DataLossPreventionJobTrigger %q", d.Id()))
15151524
}
15161525

1526+
res, err = resourceDataLossPreventionJobTriggerDecoder(d, meta, res)
1527+
if err != nil {
1528+
return err
1529+
}
1530+
1531+
if res == nil {
1532+
// Decoding the object has resulted in it being gone. It may be marked deleted
1533+
log.Printf("[DEBUG] Removing DataLossPreventionJobTrigger because it no longer exists.")
1534+
d.SetId("")
1535+
return nil
1536+
}
1537+
15171538
if err := d.Set("name", flattenDataLossPreventionJobTriggerName(res["name"], d, config)); err != nil {
15181539
return fmt.Errorf("Error reading JobTrigger: %s", err)
15191540
}
@@ -1586,7 +1607,7 @@ func resourceDataLossPreventionJobTriggerUpdate(d *schema.ResourceData, meta int
15861607
obj["inspectJob"] = inspectJobProp
15871608
}
15881609

1589-
obj, err = resourceDataLossPreventionJobTriggerEncoder(d, meta, obj)
1610+
obj, err = resourceDataLossPreventionJobTriggerUpdateEncoder(d, meta, obj)
15901611
if err != nil {
15911612
return err
15921613
}
@@ -5761,7 +5782,26 @@ func expandDataLossPreventionJobTriggerInspectJobActionsPublishToStackdriver(v i
57615782
}
57625783

57635784
func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
5785+
57645786
newObj := make(map[string]interface{})
57655787
newObj["jobTrigger"] = obj
5788+
triggerIdProp, ok := d.GetOk("trigger_id")
5789+
if ok && triggerIdProp != nil {
5790+
newObj["triggerId"] = triggerIdProp
5791+
}
57665792
return newObj, nil
57675793
}
5794+
5795+
func resourceDataLossPreventionJobTriggerUpdateEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
5796+
newObj := make(map[string]interface{})
5797+
newObj["jobTrigger"] = obj
5798+
return newObj, nil
5799+
}
5800+
5801+
func resourceDataLossPreventionJobTriggerDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) {
5802+
config := meta.(*transport_tpg.Config)
5803+
if err := d.Set("trigger_id", flattenDataLossPreventionJobTriggerName(res["name"], d, config)); err != nil {
5804+
return nil, fmt.Errorf("Error reading JobTrigger: %s", err)
5805+
}
5806+
return res, nil
5807+
}

website/docs/r/data_loss_prevention_job_trigger.html.markdown

+44
Original file line numberDiff line numberDiff line change
@@ -432,6 +432,44 @@ resource "google_data_loss_prevention_job_trigger" "publish_to_stackdriver" {
432432
}
433433
}
434434
```
435+
## Example Usage - Dlp Job Trigger With Id
436+
437+
438+
```hcl
439+
resource "google_data_loss_prevention_job_trigger" "with_trigger_id" {
440+
parent = "projects/my-project-name"
441+
description = "Starting description"
442+
display_name = "display"
443+
trigger_id = "id-"
444+
445+
triggers {
446+
schedule {
447+
recurrence_period_duration = "86400s"
448+
}
449+
}
450+
451+
inspect_job {
452+
inspect_template_name = "fake"
453+
actions {
454+
save_findings {
455+
output_config {
456+
table {
457+
project_id = "project"
458+
dataset_id = "dataset123"
459+
}
460+
}
461+
}
462+
}
463+
storage_config {
464+
cloud_storage_options {
465+
file_set {
466+
url = "gs://mybucket/directory/"
467+
}
468+
}
469+
}
470+
}
471+
}
472+
```
435473

436474
## Argument Reference
437475

@@ -481,6 +519,12 @@ The following arguments are supported:
481519
(Optional)
482520
User set display name of the job trigger.
483521

522+
* `trigger_id` -
523+
(Optional)
524+
The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores;
525+
that is, it must match the regular expression: [a-zA-Z\d-_]+.
526+
The maximum length is 100 characters. Can be empty to allow the system to generate one.
527+
484528
* `status` -
485529
(Optional)
486530
Whether the trigger is currently active.

0 commit comments

Comments
 (0)