Skip to content

Commit 9fb464c

Browse files
Add identifying fields to DLP job trigger (#7037) (#13463)
* Add identifying fields to DLP job trigger
* Updated identifyingFields description.
* Removed unused variable and converted tabs to spaces in example file.
* Added DLP identifying fields update test.
* Updated bigquery field name description.
* Removed changes to terraform.yaml file.

Signed-off-by: Modular Magician <[email protected]>
Signed-off-by: Modular Magician <[email protected]>
1 parent be45107 commit 9fb464c

4 files changed

+210
-0
lines changed

.changelog/7037.txt

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
dlp: Added field 'identifyingFields' to 'bigQueryOptions' for creating DLP jobs.
3+
```

google/resource_data_loss_prevention_job_trigger.go

+72
Original file line numberDiff line numberDiff line change
@@ -225,6 +225,21 @@ Only for use with external storage. Possible values: ["BASIC_COLUMNS", "GCS_COLU
225225
},
226226
},
227227
},
228+
"identifying_fields": {
229+
Type: schema.TypeList,
230+
Optional: true,
231+
Description: `Specifies the BigQuery fields that will be returned with findings.
232+
If not specified, no identifying fields will be returned for findings.`,
233+
Elem: &schema.Resource{
234+
Schema: map[string]*schema.Schema{
235+
"name": {
236+
Type: schema.TypeString,
237+
Required: true,
238+
Description: `Name of a BigQuery field to be returned with the findings.`,
239+
},
240+
},
241+
},
242+
},
228243
"rows_limit": {
229244
Type: schema.TypeInt,
230245
Optional: true,
@@ -1114,6 +1129,8 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
11141129
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(original["rowsLimitPercent"], d, config)
11151130
transformed["sample_method"] =
11161131
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(original["sampleMethod"], d, config)
1132+
transformed["identifying_fields"] =
1133+
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifyingFields"], d, config)
11171134
return []interface{}{transformed}
11181135
}
11191136
func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1183,6 +1200,28 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSa
11831200
return v
11841201
}
11851202

1203+
func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d *schema.ResourceData, config *Config) interface{} {
1204+
if v == nil {
1205+
return v
1206+
}
1207+
l := v.([]interface{})
1208+
transformed := make([]interface{}, 0, len(l))
1209+
for _, raw := range l {
1210+
original := raw.(map[string]interface{})
1211+
if len(original) < 1 {
1212+
// Do not include empty json objects coming back from the api
1213+
continue
1214+
}
1215+
transformed = append(transformed, map[string]interface{}{
1216+
"name": flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config),
1217+
})
1218+
}
1219+
return transformed
1220+
}
1221+
// flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName
// passes the API value through unchanged; a BigQuery field name is a plain
// string and needs no conversion for state.
func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
1224+
11861225
func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *schema.ResourceData, config *Config) interface{} {
11871226
if v == nil {
11881227
return v
@@ -1758,6 +1797,13 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
17581797
transformed["sampleMethod"] = transformedSampleMethod
17591798
}
17601799

1800+
transformedIdentifyingFields, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifying_fields"], d, config)
1801+
if err != nil {
1802+
return nil, err
1803+
} else if val := reflect.ValueOf(transformedIdentifyingFields); val.IsValid() && !isEmptyValue(val) {
1804+
transformed["identifyingFields"] = transformedIdentifyingFields
1805+
}
1806+
17611807
return transformed, nil
17621808
}
17631809

@@ -1818,6 +1864,32 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSam
18181864
return v, nil
18191865
}
18201866

1867+
func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
1868+
l := v.([]interface{})
1869+
req := make([]interface{}, 0, len(l))
1870+
for _, raw := range l {
1871+
if raw == nil {
1872+
continue
1873+
}
1874+
original := raw.(map[string]interface{})
1875+
transformed := make(map[string]interface{})
1876+
1877+
transformedName, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config)
1878+
if err != nil {
1879+
return nil, err
1880+
} else if val := reflect.ValueOf(transformedName); val.IsValid() && !isEmptyValue(val) {
1881+
transformed["name"] = transformedName
1882+
}
1883+
1884+
req = append(req, transformed)
1885+
}
1886+
return req, nil
1887+
}
1888+
1889+
// expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName
// passes the configured field name through unchanged; the API accepts the
// plain string as-is.
func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
1892+
18211893
func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
18221894
l := v.([]interface{})
18231895
req := make([]interface{}, 0, len(l))

google/resource_data_loss_prevention_job_trigger_test.go

+123
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,41 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample(t *testing.T
4141
})
4242
}
4343

44+
// TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample2 verifies
// that the identifying_fields block of big_query_options can be set on
// create and changed on update, importing the resource after each step to
// confirm the stored state round-trips through the API.
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample2(t *testing.T) {
	t.Parallel()

	context := map[string]interface{}{
		"project":       getTestProjectFromEnv(),
		"random_suffix": randString(t, 10),
	}

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				// Create with identifying_fields { name = "field" }.
				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context),
			},
			{
				ResourceName:      "google_data_loss_prevention_job_trigger.identifying_fields",
				ImportState:       true,
				ImportStateVerify: true,
				// NOTE(review): "parent" is skipped on import verify —
				// presumably not returned by the API read; confirm.
				ImportStateVerifyIgnore: []string{"parent"},
			},
			{
				// Update to identifying_fields { name = "different" }.
				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context),
			},
			{
				ResourceName:            "google_data_loss_prevention_job_trigger.identifying_fields_update",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"parent"},
			},
		},
	})
}
78+
4479
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPubsub(t *testing.T) {
4580
t.Parallel()
4681

@@ -103,6 +138,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
103138
`, context)
104139
}
105140

141+
// testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields returns
// a config creating a job trigger whose big_query_options include a single
// identifying_fields entry ("field"); used as the create step of the
// identifying-fields update test.
func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context map[string]interface{}) string {
	return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "identifying_fields" {
	parent       = "projects/%{project}"
	description  = "Starting description"
	display_name = "display"

	triggers {
		schedule {
			recurrence_period_duration = "86400s"
		}
	}

	inspect_job {
		inspect_template_name = "fake"
		actions {
			save_findings {
				output_config {
					table {
						project_id = "project"
						dataset_id = "dataset123"
					}
				}
			}
		}
		storage_config {
			big_query_options {
				table_reference {
					project_id = "project"
					dataset_id = "dataset"
					table_id   = "table_to_scan"
				}
				rows_limit    = 1000
				sample_method = "RANDOM_START"
				identifying_fields {
					name = "field"
				}
			}
		}
	}
}
`, context)
}
184+
106185
func testAccDataLossPreventionJobTrigger_dlpJobTriggerUpdate(context map[string]interface{}) string {
107186
return Nprintf(`
108187
resource "google_data_loss_prevention_job_trigger" "basic" {
@@ -140,6 +219,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
140219
`, context)
141220
}
142221

222+
// testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate
// returns the update-step config: same trigger shape as the create step but
// with changed description/display_name and identifying_fields name
// ("different"), exercising in-place update of the new field.
func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context map[string]interface{}) string {
	return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "identifying_fields_update" {
	parent       = "projects/%{project}"
	description  = "An updated description"
	display_name = "Different"

	triggers {
		schedule {
			recurrence_period_duration = "86400s"
		}
	}

	inspect_job {
		inspect_template_name = "fake"
		actions {
			save_findings {
				output_config {
					table {
						project_id = "project"
						dataset_id = "dataset123"
					}
				}
			}
		}
		storage_config {
			big_query_options {
				table_reference {
					project_id = "project"
					dataset_id = "dataset"
					table_id   = "table_to_scan"
				}
				rows_limit    = 1000
				sample_method = "RANDOM_START"
				identifying_fields {
					name = "different"
				}
			}
		}
	}
}
`, context)
}
265+
143266
func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string {
144267
return Nprintf(`
145268
resource "google_data_loss_prevention_job_trigger" "pubsub" {

website/docs/r/data_loss_prevention_job_trigger.html.markdown

+12
Original file line numberDiff line numberDiff line change
@@ -406,6 +406,12 @@ The following arguments are supported:
406406
Default value is `TOP`.
407407
Possible values are `TOP` and `RANDOM_START`.
408408

409+
* `identifying_fields` -
410+
(Optional)
411+
Specifies the BigQuery fields that will be returned with findings.
412+
If not specified, no identifying fields will be returned for findings.
413+
Structure is [documented below](#nested_identifying_fields).
414+
409415

410416
<a name="nested_table_reference"></a>The `table_reference` block supports:
411417

@@ -421,6 +427,12 @@ The following arguments are supported:
421427
(Required)
422428
The name of the table.
423429

430+
<a name="nested_identifying_fields"></a>The `identifying_fields` block supports:
431+
432+
* `name` -
433+
(Required)
434+
Name of a BigQuery field to be returned with the findings.
435+
424436
<a name="nested_actions"></a>The `actions` block supports:
425437

426438
* `save_findings` -

0 commit comments

Comments
 (0)