Skip to content

Commit f63822d

Browse files
add event_stream block to google_storage_transfer_job schema (#8894) (#16004)
* add event_stream block to google_storage_transfer_job schema * refactor optional/required fields * add flatteners/expanders * add flatteners/expanders and tests * update docs * remove ForceNew * add event_stream in update function * add ConflictsWith for schedule/event_stream * use ExactlyOneOf and update docs and tests * Finish event_stream tests * Update mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go * add randomstrings to config * remove copyright headers --------- Signed-off-by: Modular Magician <[email protected]> Co-authored-by: Sarah French <[email protected]>
1 parent 02ba38c commit f63822d

File tree

4 files changed

+238
-5
lines changed

4 files changed

+238
-5
lines changed

.changelog/8894.txt

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
storagetransfer: added `event_stream` field to `google_storage_transfer_job` resource
3+
```

google/services/storagetransfer/resource_storage_transfer_job.go

+77-3
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,33 @@ func ResourceStorageTransferJob() *schema.Resource {
8282
ForceNew: true,
8383
Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`,
8484
},
85+
"event_stream": {
86+
Type: schema.TypeList,
87+
Optional: true,
88+
MaxItems: 1,
89+
ConflictsWith: []string{"schedule"},
90+
Elem: &schema.Resource{
91+
Schema: map[string]*schema.Schema{
92+
"name": {
93+
Type: schema.TypeString,
94+
Required: true,
95+
Description: "Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'",
96+
},
97+
"event_stream_start_time": {
98+
Type: schema.TypeString,
99+
Optional: true,
100+
Description: "Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately",
101+
ValidateFunc: validation.IsRFC3339Time,
102+
},
103+
"event_stream_expiration_time": {
104+
Type: schema.TypeString,
105+
Optional: true,
106+
Description: "Specifies the data and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated",
107+
ValidateFunc: validation.IsRFC3339Time,
108+
},
109+
},
110+
},
111+
},
85112
"transfer_spec": {
86113
Type: schema.TypeList,
87114
Required: true,
@@ -195,9 +222,10 @@ func ResourceStorageTransferJob() *schema.Resource {
195222
Description: `Notification configuration.`,
196223
},
197224
"schedule": {
198-
Type: schema.TypeList,
199-
Optional: true,
200-
MaxItems: 1,
225+
Type: schema.TypeList,
226+
Optional: true,
227+
MaxItems: 1,
228+
ConflictsWith: []string{"event_stream"},
201229
Elem: &schema.Resource{
202230
Schema: map[string]*schema.Schema{
203231
"schedule_start_date": {
@@ -567,6 +595,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{})
567595
ProjectId: project,
568596
Status: d.Get("status").(string),
569597
Schedule: expandTransferSchedules(d.Get("schedule").([]interface{})),
598+
EventStream: expandEventStream(d.Get("event_stream").([]interface{})),
570599
TransferSpec: expandTransferSpecs(d.Get("transfer_spec").([]interface{})),
571600
NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})),
572601
}
@@ -642,6 +671,11 @@ func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) er
642671
return err
643672
}
644673

674+
err = d.Set("event_stream", flattenTransferEventStream(res.EventStream))
675+
if err != nil {
676+
return err
677+
}
678+
645679
err = d.Set("transfer_spec", flattenTransferSpec(res.TransferSpec, d))
646680
if err != nil {
647681
return err
@@ -670,6 +704,13 @@ func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{})
670704
transferJob := &storagetransfer.TransferJob{}
671705
fieldMask := []string{}
672706

707+
if d.HasChange("event_stream") {
708+
fieldMask = append(fieldMask, "event_stream")
709+
if v, ok := d.GetOk("event_stream"); ok {
710+
transferJob.EventStream = expandEventStream(v.([]interface{}))
711+
}
712+
}
713+
673714
if d.HasChange("description") {
674715
fieldMask = append(fieldMask, "description")
675716
if v, ok := d.GetOk("description"); ok {
@@ -899,6 +940,39 @@ func flattenTransferSchedule(transferSchedule *storagetransfer.Schedule) []map[s
899940
return []map[string]interface{}{data}
900941
}
901942

943+
func expandEventStream(e []interface{}) *storagetransfer.EventStream {
944+
if len(e) == 0 || e[0] == nil {
945+
return nil
946+
}
947+
948+
eventStream := e[0].(map[string]interface{})
949+
return &storagetransfer.EventStream{
950+
Name: eventStream["name"].(string),
951+
EventStreamStartTime: eventStream["event_stream_start_time"].(string),
952+
EventStreamExpirationTime: eventStream["event_stream_expiration_time"].(string),
953+
}
954+
}
955+
956+
func flattenTransferEventStream(eventStream *storagetransfer.EventStream) []map[string]interface{} {
957+
if eventStream == nil || reflect.DeepEqual(eventStream, &storagetransfer.EventStream{}) {
958+
return nil
959+
}
960+
961+
data := map[string]interface{}{
962+
"name": eventStream.Name,
963+
}
964+
965+
if eventStream.EventStreamStartTime != "" {
966+
data["event_stream_start_time"] = eventStream.EventStreamStartTime
967+
}
968+
969+
if eventStream.EventStreamExpirationTime != "" {
970+
data["event_stream_expiration_time"] = eventStream.EventStreamExpirationTime
971+
}
972+
973+
return []map[string]interface{}{data}
974+
}
975+
902976
func expandGcsData(gcsDatas []interface{}) *storagetransfer.GcsData {
903977
if len(gcsDatas) == 0 || gcsDatas[0] == nil {
904978
return nil

google/services/storagetransfer/resource_storage_transfer_job_test.go

+146
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,59 @@ func TestAccStorageTransferJob_transferOptions(t *testing.T) {
201201
})
202202
}
203203

204+
func TestAccStorageTransferJob_eventStream(t *testing.T) {
205+
t.Parallel()
206+
207+
testDataSourceBucketName := acctest.RandString(t, 10)
208+
testDataSinkName := acctest.RandString(t, 10)
209+
testTransferJobDescription := acctest.RandString(t, 10)
210+
testPubSubTopicName := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10))
211+
testEventStreamPubSubTopicName := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10))
212+
testPubSubSubscriptionName := fmt.Sprintf("tf-test-subscription-%s", acctest.RandString(t, 10))
213+
eventStreamStart := []string{"2014-10-02T15:01:23Z", "2019-10-02T15:01:23Z"}
214+
eventStreamEnd := []string{"2022-10-02T15:01:23Z", "2032-10-02T15:01:23Z"}
215+
216+
acctest.VcrTest(t, resource.TestCase{
217+
PreCheck: func() { acctest.AccTestPreCheck(t) },
218+
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
219+
CheckDestroy: testAccStorageTransferJobDestroyProducer(t),
220+
Steps: []resource.TestStep{
221+
{
222+
Config: testAccStorageTransferJob_basic(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testPubSubTopicName),
223+
},
224+
{
225+
ResourceName: "google_storage_transfer_job.transfer_job",
226+
ImportState: true,
227+
ImportStateVerify: true,
228+
},
229+
{
230+
Config: testAccStorageTransferJob_eventStream(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testEventStreamPubSubTopicName, testPubSubSubscriptionName, testTransferJobDescription, eventStreamStart[0], eventStreamEnd[0]),
231+
},
232+
{
233+
ResourceName: "google_storage_transfer_job.transfer_job",
234+
ImportState: true,
235+
ImportStateVerify: true,
236+
},
237+
{
238+
Config: testAccStorageTransferJob_eventStream(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testEventStreamPubSubTopicName, testPubSubSubscriptionName, testTransferJobDescription, eventStreamStart[1], eventStreamEnd[0]),
239+
},
240+
{
241+
ResourceName: "google_storage_transfer_job.transfer_job",
242+
ImportState: true,
243+
ImportStateVerify: true,
244+
},
245+
{
246+
Config: testAccStorageTransferJob_eventStream(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testEventStreamPubSubTopicName, testPubSubSubscriptionName, testTransferJobDescription, eventStreamStart[1], eventStreamEnd[1]),
247+
},
248+
{
249+
ResourceName: "google_storage_transfer_job.transfer_job",
250+
ImportState: true,
251+
ImportStateVerify: true,
252+
},
253+
},
254+
})
255+
}
256+
204257
func TestAccStorageTransferJob_objectConditions(t *testing.T) {
205258
t.Parallel()
206259

@@ -383,6 +436,99 @@ resource "google_storage_transfer_job" "transfer_job" {
383436
`, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project)
384437
}
385438

439+
func testAccStorageTransferJob_eventStream(project string, dataSourceBucketName string, dataSinkBucketName string, pubsubTopicName string, pubsubSubscriptionName string, transferJobDescription string, eventStreamStart string, eventStreamEnd string) string {
440+
return fmt.Sprintf(`
441+
data "google_storage_transfer_project_service_account" "default" {
442+
project = "%s"
443+
}
444+
445+
resource "google_storage_bucket" "data_source" {
446+
name = "%s"
447+
project = "%s"
448+
location = "US"
449+
force_destroy = true
450+
uniform_bucket_level_access = true
451+
}
452+
453+
resource "google_storage_bucket_iam_member" "data_source" {
454+
bucket = google_storage_bucket.data_source.name
455+
role = "roles/storage.admin"
456+
member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
457+
}
458+
459+
resource "google_storage_bucket" "data_sink" {
460+
name = "%s"
461+
project = "%s"
462+
location = "US"
463+
force_destroy = true
464+
uniform_bucket_level_access = true
465+
}
466+
467+
resource "google_storage_bucket_iam_member" "data_sink" {
468+
bucket = google_storage_bucket.data_sink.name
469+
role = "roles/storage.admin"
470+
member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
471+
}
472+
473+
resource "google_pubsub_subscription_iam_member" "editor" {
474+
subscription = google_pubsub_subscription.example.name
475+
role = "roles/editor"
476+
member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
477+
}
478+
479+
resource "google_pubsub_topic" "example" {
480+
name = "%s"
481+
}
482+
483+
resource "google_pubsub_subscription" "example" {
484+
name = "%s"
485+
topic = google_pubsub_topic.example.name
486+
487+
ack_deadline_seconds = 20
488+
489+
labels = {
490+
foo = "bar"
491+
}
492+
493+
push_config {
494+
push_endpoint = "https://example.com/push"
495+
496+
attributes = {
497+
x-goog-version = "v1"
498+
}
499+
}
500+
}
501+
502+
resource "google_storage_transfer_job" "transfer_job" {
503+
description = "%s"
504+
project = "%s"
505+
506+
event_stream {
507+
name = google_pubsub_subscription.example.id
508+
event_stream_start_time = "%s"
509+
event_stream_expiration_time = "%s"
510+
}
511+
512+
transfer_spec {
513+
gcs_data_source {
514+
bucket_name = google_storage_bucket.data_source.name
515+
path = "foo/bar/"
516+
}
517+
gcs_data_sink {
518+
bucket_name = google_storage_bucket.data_sink.name
519+
path = "foo/bar/"
520+
}
521+
}
522+
523+
depends_on = [
524+
google_storage_bucket_iam_member.data_source,
525+
google_storage_bucket_iam_member.data_sink,
526+
google_pubsub_subscription_iam_member.editor,
527+
]
528+
}
529+
`, project, dataSourceBucketName, project, dataSinkBucketName, project, pubsubTopicName, pubsubSubscriptionName, transferJobDescription, project, eventStreamStart, eventStreamEnd)
530+
}
531+
386532
func testAccStorageTransferJob_omitNotificationConfig(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string {
387533
return fmt.Sprintf(`
388534
data "google_storage_transfer_project_service_account" "default" {

website/docs/r/storage_transfer_job.html.markdown

+12-2
Original file line numberDiff line numberDiff line change
@@ -116,10 +116,12 @@ The following arguments are supported:
116116

117117
* `transfer_spec` - (Required) Transfer specification. Structure [documented below](#nested_transfer_spec).
118118

119-
* `schedule` - (Required) Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure [documented below](#nested_schedule).
120-
121119
- - -
122120

121+
* `schedule` - (Optional) Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure [documented below](#nested_schedule). Either `schedule` or `event_stream` must be set.
122+
123+
* `event_stream` - (Optional) Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure [documented below](#nested_event_stream). Either `event_stream` or `schedule` must be set.
124+
123125
* `project` - (Optional) The project in which the resource belongs. If it
124126
is not provided, the provider project is used.
125127

@@ -161,6 +163,14 @@ The following arguments are supported:
161163

162164
* `repeat_interval` - (Optional) Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
163165

166+
<a name="nested_event_stream"></a>The `event_stream` block supports:
167+
168+
* `name` - (Required) Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
169+
170+
* `event_stream_start_time` - (Optional) Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
171+
172+
* `event_stream_expiration_time` - (Optional) Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
173+
164174
<a name="nested_object_conditions"></a>The `object_conditions` block supports:
165175

166176
* `max_time_elapsed_since_last_modification` - (Optional) A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".

0 commit comments

Comments
 (0)