
Commit e623d9f

Add extra BigQuery options to DLP inspect job trigger (#6749) (#12980)
Signed-off-by: Modular Magician <[email protected]>
1 parent 12d406e

4 files changed: +345 −4 lines

.changelog/6749.txt (+3 lines)

```diff
@@ -0,0 +1,3 @@
+```release-note:enhancement
+dlp: added fields `rows_limit`, `rows_limit_percent`, and `sample_method` to `big_query_options` in `google_data_loss_prevention_job_trigger`
+```
```

google/resource_data_loss_prevention_job_trigger.go (+99 lines)

```diff
@@ -225,6 +225,28 @@ Only for use with external storage. Possible values: ["BASIC_COLUMNS", "GCS_COLU
 					},
 				},
 			},
+			"rows_limit": {
+				Type:     schema.TypeInt,
+				Optional: true,
+				Description: `Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted.
+If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be
+specified. Cannot be used in conjunction with TimespanConfig.`,
+			},
+			"rows_limit_percent": {
+				Type:     schema.TypeInt,
+				Optional: true,
+				Description: `Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down.
+Must be between 0 and 100, inclusive. Both 0 and 100 mean no limit. Defaults to 0. Only one of
+rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.`,
+			},
+			"sample_method": {
+				Type:         schema.TypeString,
+				Optional:     true,
+				ValidateFunc: validateEnum([]string{"TOP", "RANDOM_START", ""}),
+				Description: `How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either
+rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value: "TOP" Possible values: ["TOP", "RANDOM_START"]`,
+				Default: "TOP",
+			},
 		},
 	},
 },
```
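A note on the `sample_method` schema above: the empty string sits in the allowed values so that an unset attribute passes validation before the `Default` of `"TOP"` is applied. `validateEnum` itself is not part of this diff; a minimal sketch of the assumed behavior, delegating to the SDK's `StringInSlice` validator:

```go
import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// Sketch only (the helper lives elsewhere in the provider): validateEnum
// is assumed to wrap the SDK's StringInSlice validator, rejecting any
// configured value outside the given list.
func validateEnum(values []string) schema.SchemaValidateFunc {
	return validation.StringInSlice(values, false)
}
```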
```diff
@@ -1086,6 +1108,12 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
 	transformed := make(map[string]interface{})
 	transformed["table_reference"] =
 		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference(original["tableReference"], d, config)
+	transformed["rows_limit"] =
+		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimit(original["rowsLimit"], d, config)
+	transformed["rows_limit_percent"] =
+		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(original["rowsLimitPercent"], d, config)
+	transformed["sample_method"] =
+		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(original["sampleMethod"], d, config)
 	return []interface{}{transformed}
 }
 func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference(v interface{}, d *schema.ResourceData, config *Config) interface{} {
```
```diff
@@ -1117,6 +1145,44 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTa
 	return v
 }
 
+func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimit(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+	// Handles the string fixed64 format
+	if strVal, ok := v.(string); ok {
+		if intVal, err := stringToFixed64(strVal); err == nil {
+			return intVal
+		}
+	}
+
+	// number values are represented as float64
+	if floatVal, ok := v.(float64); ok {
+		intVal := int(floatVal)
+		return intVal
+	}
+
+	return v // let terraform core handle it otherwise
+}
+
+func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+	// Handles the string fixed64 format
+	if strVal, ok := v.(string); ok {
+		if intVal, err := stringToFixed64(strVal); err == nil {
+			return intVal
+		}
+	}
+
+	// number values are represented as float64
+	if floatVal, ok := v.(float64); ok {
+		intVal := int(floatVal)
+		return intVal
+	}
+
+	return v // let terraform core handle it otherwise
+}
+
+func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+	return v
+}
+
 func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *schema.ResourceData, config *Config) interface{} {
 	if v == nil {
 		return v
```
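The two rows-limit flatteners normalize whatever representation the API returns: DLP serializes these int64 fields as JSON strings ("fixed64"), while decoded JSON numbers arrive as float64. The `stringToFixed64` helper they call is defined elsewhere in the provider; a plausible sketch, assuming it is a thin wrapper around strconv.ParseInt:

```go
import "strconv"

// Assumed implementation, not shown in this commit: parse the API's
// string-encoded 64-bit integer back into an int64 for state.
func stringToFixed64(v string) (int64, error) {
	return strconv.ParseInt(v, 10, 64)
}
```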
```diff
@@ -1671,6 +1737,27 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
 		transformed["tableReference"] = transformedTableReference
 	}
 
+	transformedRowsLimit, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimit(original["rows_limit"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedRowsLimit); val.IsValid() && !isEmptyValue(val) {
+		transformed["rowsLimit"] = transformedRowsLimit
+	}
+
+	transformedRowsLimitPercent, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(original["rows_limit_percent"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedRowsLimitPercent); val.IsValid() && !isEmptyValue(val) {
+		transformed["rowsLimitPercent"] = transformedRowsLimitPercent
+	}
+
+	transformedSampleMethod, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(original["sample_method"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedSampleMethod); val.IsValid() && !isEmptyValue(val) {
+		transformed["sampleMethod"] = transformedSampleMethod
+	}
+
 	return transformed, nil
 }
```
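Each transformed value is only written into the request when `isEmptyValue` reports it non-zero, so an explicit `rows_limit = 0` is never sent; that lines up with the schema description, where 0 already means "scan all rows". `isEmptyValue` lives in the provider's shared utilities, not this diff; a rough sketch of the assumed semantics:

```go
import "reflect"

// Sketch (assumption, not this commit's code): treat Go zero values as
// "unset" so they are omitted from the API request body.
func isEmptyValue(v reflect.Value) bool {
	switch v.Kind() {
	case reflect.String, reflect.Array, reflect.Map, reflect.Slice:
		return v.Len() == 0
	case reflect.Bool:
		return !v.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v.Int() == 0
	case reflect.Float32, reflect.Float64:
		return v.Float() == 0
	case reflect.Interface, reflect.Ptr:
		return v.IsNil()
	}
	return false
}
```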

```diff
@@ -1719,6 +1806,18 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTab
 	return v, nil
 }
 
+func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimit(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
 func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
 	l := v.([]interface{})
 	req := make([]interface{}, 0, len(l))
```
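The three new expanders are deliberate passthroughs: Terraform core has already coerced the configured values to the schema types (int for TypeInt, string for TypeString), and those marshal directly as JSON numbers and strings. The asymmetry with the flatten side, where int64 fields may come back as strings, is shown in a small self-contained illustration (hypothetical values, not provider code):

```go
package main

import "fmt"

func main() {
	// Expand side: d.Get on a schema.TypeInt field yields a Go int,
	// so the expander can return it unchanged.
	fromConfig := 1000

	// Flatten side: the DLP API may return rowsLimit as a
	// string-encoded int64, hence the stringToFixed64 handling.
	var fromAPI interface{} = "1000"

	fmt.Printf("expand sends %v as-is\n", fromConfig)
	if s, ok := fromAPI.(string); ok {
		fmt.Printf("flatten parses %q back to a number\n", s)
	}
}
```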

google/resource_data_loss_prevention_job_trigger_generated_test.go (+138 −2 lines)

```diff
@@ -68,8 +68,8 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
       save_findings {
         output_config {
           table {
-            project_id = "asdf"
-            dataset_id = "asdf"
+            project_id = "project"
+            dataset_id = "dataset"
           }
         }
       }
```
```diff
@@ -86,6 +86,142 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
 `, context)
 }
 
+func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitExample(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"project":       getTestProjectFromEnv(),
+		"random_suffix": randString(t, 10),
+	}
+
+	vcrTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitExample(context),
+			},
+			{
+				ResourceName:            "google_data_loss_prevention_job_trigger.bigquery_row_limit",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"parent"},
+			},
+		},
+	})
+}
+
+func testAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitExample(context map[string]interface{}) string {
+	return Nprintf(`
+resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit" {
+  parent       = "projects/%{project}"
+  description  = "Description"
+  display_name = "Displayname"
+
+  triggers {
+    schedule {
+      recurrence_period_duration = "86400s"
+    }
+  }
+
+  inspect_job {
+    inspect_template_name = "fake"
+    actions {
+      save_findings {
+        output_config {
+          table {
+            project_id = "project"
+            dataset_id = "dataset"
+          }
+        }
+      }
+    }
+    storage_config {
+      big_query_options {
+        table_reference {
+          project_id = "project"
+          dataset_id = "dataset"
+          table_id   = "table_to_scan"
+        }
+
+        rows_limit    = 1000
+        sample_method = "RANDOM_START"
+      }
+    }
+  }
+}
+`, context)
+}
+
+func TestAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitPercentageExample(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"project":       getTestProjectFromEnv(),
+		"random_suffix": randString(t, 10),
+	}
+
+	vcrTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitPercentageExample(context),
+			},
+			{
+				ResourceName:            "google_data_loss_prevention_job_trigger.bigquery_row_limit_percentage",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"parent"},
+			},
+		},
+	})
+}
+
+func testAccDataLossPreventionJobTrigger_dlpJobTriggerBigqueryRowLimitPercentageExample(context map[string]interface{}) string {
+	return Nprintf(`
+resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit_percentage" {
+  parent       = "projects/%{project}"
+  description  = "Description"
+  display_name = "Displayname"
+
+  triggers {
+    schedule {
+      recurrence_period_duration = "86400s"
+    }
+  }
+
+  inspect_job {
+    inspect_template_name = "fake"
+    actions {
+      save_findings {
+        output_config {
+          table {
+            project_id = "project"
+            dataset_id = "dataset"
+          }
+        }
+      }
+    }
+    storage_config {
+      big_query_options {
+        table_reference {
+          project_id = "project"
+          dataset_id = "dataset"
+          table_id   = "table_to_scan"
+        }
+
+        rows_limit_percent = 50
+        sample_method      = "RANDOM_START"
+      }
+    }
+  }
+}
+`, context)
+}
+
 func testAccCheckDataLossPreventionJobTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error {
 	return func(s *terraform.State) error {
 		for name, rs := range s.RootModule().Resources {
```
