
Commit 8f1ae22

Ensure oneOf condition is honored when expanding the job configs (#12378)
[upstream:4d9d705df98f4e4ccc701178ff5e23ac5d8df011] Signed-off-by: Modular Magician <[email protected]>
1 parent 1b55722 commit 8f1ae22
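Background for the fix: in the provider's job expanders, both members of a oneOf pair (`query_file_uri` and `query_list`) are always present in the expanded config map, with the unset member arriving as a zero value (an empty string or an empty list) rather than being absent. Unguarded assignment therefore populated both fields and tripped the Dataproc API's oneOf validation. Below is a minimal, self-contained sketch of the guard pattern the commit applies; the structs and the `config` map are simplified stand-ins for illustration, not the provider's actual types.

```go
package main

import "fmt"

// QueryList mirrors the shape of dataproc.QueryList for illustration.
type QueryList struct {
	Queries []string
}

// HiveJob mirrors the two oneOf members of dataproc.HiveJob.
type HiveJob struct {
	QueryFileUri string
	QueryList    *QueryList
}

func main() {
	// Terraform hands the expander both keys, with a zero value for the
	// member the user did not set.
	config := map[string]interface{}{
		"query_file_uri": "gs://bucket/query.sql",
		"query_list":     []interface{}{},
	}

	job := &HiveJob{}
	if v, ok := config["query_file_uri"]; ok {
		// Guard: only honor the field when it is non-empty, so the unset
		// oneOf sibling is not sent to the API.
		if uri := v.(string); len(uri) != 0 {
			job.QueryFileUri = uri
		}
	}
	if v, ok := config["query_list"]; ok {
		if list := v.([]interface{}); len(list) != 0 {
			queries := make([]string, len(list))
			for i, q := range list {
				queries[i] = q.(string)
			}
			job.QueryList = &QueryList{Queries: queries}
		}
	}

	// Only QueryFileUri is set; QueryList stays nil instead of becoming an
	// empty QueryList that would violate the API's oneOf constraint.
	fmt.Printf("QueryFileUri=%q QueryList=%v\n", job.QueryFileUri, job.QueryList)
}
```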

File tree: 3 files changed, +79 −25 lines

.changelog/12378.txt

+3
@@ -0,0 +1,3 @@
+```release-note:bug
+dataproc: ensured oneOf condition is honored when expanding the job configuration for Hive, Pig, Spark-sql, and Presto in `google_dataproc_job`.
+```

google-beta/services/dataproc/resource_dataproc_job.go

+41 −17
@@ -931,11 +931,17 @@ func flattenHiveJob(job *dataproc.HiveJob) []map[string]interface{} {
 func expandHiveJob(config map[string]interface{}) *dataproc.HiveJob {
 	job := &dataproc.HiveJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["continue_on_failure"]; ok {
if v, ok := config["continue_on_failure"]; ok {
@@ -1039,11 +1045,17 @@ func flattenPigJob(job *dataproc.PigJob) []map[string]interface{} {
 func expandPigJob(config map[string]interface{}) *dataproc.PigJob {
 	job := &dataproc.PigJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["continue_on_failure"]; ok {
@@ -1140,11 +1152,17 @@ func flattenSparkSqlJob(job *dataproc.SparkSqlJob) []map[string]interface{} {
 func expandSparkSqlJob(config map[string]interface{}) *dataproc.SparkSqlJob {
 	job := &dataproc.SparkSqlJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["script_variables"]; ok {
@@ -1241,20 +1259,26 @@ func flattenPrestoJob(job *dataproc.PrestoJob) []map[string]interface{} {
 
 func expandPrestoJob(config map[string]interface{}) *dataproc.PrestoJob {
 	job := &dataproc.PrestoJob{}
+	if v, ok := config["query_file_uri"]; ok {
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
+	}
+	if v, ok := config["query_list"]; ok {
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
+		}
+	}
 	if v, ok := config["client_tags"]; ok {
 		job.ClientTags = tpgresource.ConvertStringArr(v.([]interface{}))
 	}
 	if v, ok := config["continue_on_failure"]; ok {
 		job.ContinueOnFailure = v.(bool)
 	}
-	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
-	}
-	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
-		}
-	}
 	if v, ok := config["properties"]; ok {
 		job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{}))
 	}

google-beta/services/dataproc/resource_dataproc_job_test.go

+35 −8
@@ -265,22 +265,17 @@ func TestAccDataprocJob_Pig(t *testing.T) {
 	})
 }
 
-func TestAccDataprocJob_SparkSql(t *testing.T) {
+func testAccDataprocJobSparkSql(t *testing.T, config string) {
 	t.Parallel()
-
 	var job dataproc.Job
-	rnd := acctest.RandString(t, 10)
-	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
-	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
-	acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName)
 
 	acctest.VcrTest(t, resource.TestCase{
 		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
 		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
 		CheckDestroy:             testAccCheckDataprocJobDestroyProducer(t),
 		Steps: []resource.TestStep{
 			{
-				Config: testAccDataprocJob_sparksql(rnd, subnetworkName),
+				Config: config,
 				Check: resource.ComposeTestCheckFunc(
 					testAccCheckDataprocJobExists(t, "google_dataproc_job.sparksql", &job),
@@ -301,6 +296,20 @@ func TestAccDataprocJob_SparkSql(t *testing.T) {
 	})
 }
 
+func TestAccDataprocJob_SparkSql_QueryList(t *testing.T) {
+	rnd := acctest.RandString(t, 10)
+	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
+	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
+	testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName))
+}
+
+func TestAccDataprocJob_SparkSql_QueryFile(t *testing.T) {
+	rnd := acctest.RandString(t, 10)
+	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
+	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
+	testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName))
+}
+
 func TestAccDataprocJob_Presto(t *testing.T) {
 	t.Parallel()
 
@@ -833,7 +842,7 @@ resource "google_dataproc_job" "pig" {
 
 }
 
-func testAccDataprocJob_sparksql(rnd, subnetworkName string) string {
+func testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName string) string {
 	return fmt.Sprintf(
 		singleNodeClusterConfig+`
 resource "google_dataproc_job" "sparksql" {
@@ -855,6 +864,24 @@ resource "google_dataproc_job" "sparksql" {
 
 }
 
+func testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName string) string {
+	return fmt.Sprintf(
+		singleNodeClusterConfig+`
+resource "google_dataproc_job" "sparksql" {
+  region       = google_dataproc_cluster.basic.region
+  force_delete = true
+  placement {
+    cluster_name = google_dataproc_cluster.basic.name
+  }
+
+  sparksql_config {
+    query_file_uri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/spark-sql/natality/cigarette_correlations.sql"
+  }
+}
+`, rnd, subnetworkName)
+
+}
+
 func testAccDataprocJob_presto(rnd, subnetworkName string) string {
 	return fmt.Sprintf(`
 resource "google_dataproc_cluster" "basic" {
