Skip to content

Commit 5665b23

Browse files
ahzazniharika-98
authored and committed
Ensure oneOf condition is honored when expanding the job configs (GoogleCloudPlatform#12378)
1 parent 89976bb commit 5665b23

File tree

2 files changed

+76
-25
lines changed

2 files changed

+76
-25
lines changed

mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl

+41-17
Original file line numberDiff line numberDiff line change
@@ -929,11 +929,17 @@ func flattenHiveJob(job *dataproc.HiveJob) []map[string]interface{} {
929929
func expandHiveJob(config map[string]interface{}) *dataproc.HiveJob {
930930
job := &dataproc.HiveJob{}
931931
if v, ok := config["query_file_uri"]; ok {
932-
job.QueryFileUri = v.(string)
932+
queryFileUri := v.(string)
933+
if len(queryFileUri) != 0 {
934+
job.QueryFileUri = v.(string)
935+
}
933936
}
934937
if v, ok := config["query_list"]; ok {
935-
job.QueryList = &dataproc.QueryList{
936-
Queries: tpgresource.ConvertStringArr(v.([]interface{})),
938+
queryList := v.([]interface{})
939+
if len(queryList) != 0 {
940+
job.QueryList = &dataproc.QueryList{
941+
Queries: tpgresource.ConvertStringArr(queryList),
942+
}
937943
}
938944
}
939945
if v, ok := config["continue_on_failure"]; ok {
@@ -1037,11 +1043,17 @@ func flattenPigJob(job *dataproc.PigJob) []map[string]interface{} {
10371043
func expandPigJob(config map[string]interface{}) *dataproc.PigJob {
10381044
job := &dataproc.PigJob{}
10391045
if v, ok := config["query_file_uri"]; ok {
1040-
job.QueryFileUri = v.(string)
1046+
queryFileUri := v.(string)
1047+
if len(queryFileUri) != 0 {
1048+
job.QueryFileUri = v.(string)
1049+
}
10411050
}
10421051
if v, ok := config["query_list"]; ok {
1043-
job.QueryList = &dataproc.QueryList{
1044-
Queries: tpgresource.ConvertStringArr(v.([]interface{})),
1052+
queryList := v.([]interface{})
1053+
if len(queryList) != 0 {
1054+
job.QueryList = &dataproc.QueryList{
1055+
Queries: tpgresource.ConvertStringArr(queryList),
1056+
}
10451057
}
10461058
}
10471059
if v, ok := config["continue_on_failure"]; ok {
@@ -1138,11 +1150,17 @@ func flattenSparkSqlJob(job *dataproc.SparkSqlJob) []map[string]interface{} {
11381150
func expandSparkSqlJob(config map[string]interface{}) *dataproc.SparkSqlJob {
11391151
job := &dataproc.SparkSqlJob{}
11401152
if v, ok := config["query_file_uri"]; ok {
1141-
job.QueryFileUri = v.(string)
1153+
queryFileUri := v.(string)
1154+
if len(queryFileUri) != 0 {
1155+
job.QueryFileUri = v.(string)
1156+
}
11421157
}
11431158
if v, ok := config["query_list"]; ok {
1144-
job.QueryList = &dataproc.QueryList{
1145-
Queries: tpgresource.ConvertStringArr(v.([]interface{})),
1159+
queryList := v.([]interface{})
1160+
if len(queryList) != 0 {
1161+
job.QueryList = &dataproc.QueryList{
1162+
Queries: tpgresource.ConvertStringArr(queryList),
1163+
}
11461164
}
11471165
}
11481166
if v, ok := config["script_variables"]; ok {
@@ -1239,20 +1257,26 @@ func flattenPrestoJob(job *dataproc.PrestoJob) []map[string]interface{} {
12391257

12401258
func expandPrestoJob(config map[string]interface{}) *dataproc.PrestoJob {
12411259
job := &dataproc.PrestoJob{}
1260+
if v, ok := config["query_file_uri"]; ok {
1261+
queryFileUri := v.(string)
1262+
if len(queryFileUri) != 0 {
1263+
job.QueryFileUri = v.(string)
1264+
}
1265+
}
1266+
if v, ok := config["query_list"]; ok {
1267+
queryList := v.([]interface{})
1268+
if len(queryList) != 0 {
1269+
job.QueryList = &dataproc.QueryList{
1270+
Queries: tpgresource.ConvertStringArr(queryList),
1271+
}
1272+
}
1273+
}
12421274
if v, ok := config["client_tags"]; ok {
12431275
job.ClientTags = tpgresource.ConvertStringArr(v.([]interface{}))
12441276
}
12451277
if v, ok := config["continue_on_failure"]; ok {
12461278
job.ContinueOnFailure = v.(bool)
12471279
}
1248-
if v, ok := config["query_file_uri"]; ok {
1249-
job.QueryFileUri = v.(string)
1250-
}
1251-
if v, ok := config["query_list"]; ok {
1252-
job.QueryList = &dataproc.QueryList{
1253-
Queries: tpgresource.ConvertStringArr(v.([]interface{})),
1254-
}
1255-
}
12561280
if v, ok := config["properties"]; ok {
12571281
job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{}))
12581282
}

mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl

+35-8
Original file line numberDiff line numberDiff line change
@@ -263,22 +263,17 @@ func TestAccDataprocJob_Pig(t *testing.T) {
263263
})
264264
}
265265

266-
func TestAccDataprocJob_SparkSql(t *testing.T) {
266+
func testAccDataprocJobSparkSql(t *testing.T, config string) {
267267
t.Parallel()
268-
269268
var job dataproc.Job
270-
rnd := acctest.RandString(t, 10)
271-
networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
272-
subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
273-
acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName)
274269

275270
acctest.VcrTest(t, resource.TestCase{
276271
PreCheck: func() { acctest.AccTestPreCheck(t) },
277272
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
278273
CheckDestroy: testAccCheckDataprocJobDestroyProducer(t),
279274
Steps: []resource.TestStep{
280275
{
281-
Config: testAccDataprocJob_sparksql(rnd, subnetworkName),
276+
Config: config,
282277
Check: resource.ComposeTestCheckFunc(
283278
testAccCheckDataprocJobExists(t, "google_dataproc_job.sparksql", &job),
284279

@@ -299,6 +294,20 @@ func TestAccDataprocJob_SparkSql(t *testing.T) {
299294
})
300295
}
301296

297+
func TestAccDataprocJob_SparkSql_QueryList(t *testing.T) {
298+
rnd := acctest.RandString(t, 10)
299+
networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
300+
subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
301+
testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName))
302+
}
303+
304+
func TestAccDataprocJob_SparkSql_QueryFile(t *testing.T) {
305+
rnd := acctest.RandString(t, 10)
306+
networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
307+
subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
308+
testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName))
309+
}
310+
302311
func TestAccDataprocJob_Presto(t *testing.T) {
303312
t.Parallel()
304313

@@ -831,7 +840,7 @@ resource "google_dataproc_job" "pig" {
831840

832841
}
833842

834-
func testAccDataprocJob_sparksql(rnd, subnetworkName string) string {
843+
func testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName string) string {
835844
return fmt.Sprintf(
836845
singleNodeClusterConfig+`
837846
resource "google_dataproc_job" "sparksql" {
@@ -853,6 +862,24 @@ resource "google_dataproc_job" "sparksql" {
853862

854863
}
855864

865+
func testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName string) string {
866+
return fmt.Sprintf(
867+
singleNodeClusterConfig+`
868+
resource "google_dataproc_job" "sparksql" {
869+
region = google_dataproc_cluster.basic.region
870+
force_delete = true
871+
placement {
872+
cluster_name = google_dataproc_cluster.basic.name
873+
}
874+
875+
sparksql_config {
876+
query_file_uri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/spark-sql/natality/cigarette_correlations.sql"
877+
}
878+
}
879+
`, rnd, subnetworkName)
880+
881+
}
882+
856883
func testAccDataprocJob_presto(rnd, subnetworkName string) string {
857884
return fmt.Sprintf(`
858885
resource "google_dataproc_cluster" "basic" {

0 commit comments

Comments
 (0)