Commit 3eafa96

chrisst authored and modular-magician committed
Refactor storage bucket import logic
Signed-off-by: Modular Magician <[email protected]>
1 parent 9306e08 commit 3eafa96

File tree

3 files changed: +37 -14 lines

google/resource_sql_database_instance_test.go (+1)
google/resource_storage_bucket.go (+26 -12)
website/docs/r/storage_bucket.html.markdown (+10 -2)


google/resource_sql_database_instance_test.go (+1)

```diff
@@ -9,6 +9,7 @@ import (
 	"github.com/hashicorp/terraform/helper/acctest"
 	"github.com/hashicorp/terraform/helper/resource"
 	"github.com/hashicorp/terraform/terraform"
+
 	sqladmin "google.golang.org/api/sqladmin/v1beta4"
 )
 
```

google/resource_storage_bucket.go (+26 -12)

```diff
@@ -457,22 +457,25 @@ func resourceStorageBucketRead(d *schema.ResourceData, meta interface{}) error {
 	// Get the bucket and acl
 	bucket := d.Get("name").(string)
 	res, err := config.clientStorage.Buckets.Get(bucket).Do()
-
 	if err != nil {
 		return handleNotFoundError(err, d, fmt.Sprintf("Storage Bucket %q", d.Get("name").(string)))
 	}
 	log.Printf("[DEBUG] Read bucket %v at location %v\n\n", res.Name, res.SelfLink)
 
-	// We need to get the project associated with this bucket because otherwise import
-	// won't work properly. That means we need to call the projects.get API with the
-	// project number, to get the project ID - there's no project ID field in the
-	// resource response. However, this requires a call to the Compute API, which
-	// would otherwise not be required for this resource. So, we're going to
-	// intentionally check whether the project is set *on the resource*. If it is,
-	// we will not try to fetch the project name. If it is not, either because
-	// the user intends to use the default provider project, or because the resource
-	// is currently being imported, we will read it from the API.
-	if _, ok := d.GetOk("project"); !ok {
+	// We are trying to support several different use cases for buckets. Buckets are globally
+	// unique, but they are associated with projects internally, and some users want to use
+	// buckets in a project-agnostic way. We therefore check whether the project ID has been
+	// explicitly set and use that first. If no project is explicitly set, such as during
+	// import, we look up the ID from the Compute API using the project number from the
+	// bucket API response.
+	// If you are working in a project-agnostic way and have not set the project ID in the
+	// provider block, on the resource, or in an environment variable, we use the Compute API
+	// to look up the project ID from the project number included in the bucket API response.
+	if d.Get("project") == "" {
+		project, _ := getProject(d, config)
+		d.Set("project", project)
+	}
+	if d.Get("project") == "" {
 		proj, err := config.clientCompute.Projects.Get(strconv.FormatUint(res.ProjectNumber, 10)).Do()
 		if err != nil {
 			return err
@@ -585,7 +588,18 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error
 }
 
 func resourceStorageBucketStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
-	d.Set("name", d.Id())
+	// We need to support both the bucket_name and project/bucket_name formats. This allows
+	// importing a bucket that is in a different project than the provider default.
+	// ParseImportID can't be used because a missing project would cause an error, but that
+	// is a valid state here since the project ID will be retrieved in Read.
+	parts := strings.Split(d.Id(), "/")
+	if len(parts) == 1 {
+		d.Set("name", parts[0])
+	} else if len(parts) > 1 {
+		d.Set("project", parts[0])
+		d.Set("name", parts[1])
+	}
+
 	d.Set("force_destroy", false)
 	return []*schema.ResourceData{d}, nil
 }
```
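
For readers skimming the diff, here is a minimal standalone sketch of the ID parsing the new importer performs: a bare bucket name only sets `name`, while a `project/name` pair also sets `project`. The `parseBucketImportID` helper below is hypothetical and exists only for illustration; the provider does this inline in `resourceStorageBucketStateImporter`.

```go
package main

import (
	"fmt"
	"strings"
)

// parseBucketImportID mirrors the split logic added to resourceStorageBucketStateImporter.
// It is an illustrative helper, not part of the provider.
func parseBucketImportID(id string) (project, name string) {
	parts := strings.Split(id, "/")
	if len(parts) == 1 {
		// Bare bucket name: leave project empty so it can be resolved later in Read.
		return "", parts[0]
	}
	// project/name form: the first segment is the project, the second the bucket name.
	return parts[0], parts[1]
}

func main() {
	p, n := parseBucketImportID("image-store-bucket")
	fmt.Printf("project=%q name=%q\n", p, n) // project="" name="image-store-bucket"

	p, n = parseBucketImportID("tf-test-project/image-store-bucket")
	fmt.Printf("project=%q name=%q\n", p, n) // project="tf-test-project" name="image-store-bucket"
}
```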

website/docs/r/storage_bucket.html.markdown (+10 -2)

````diff
@@ -18,7 +18,9 @@ For more information see
 and
 [API](https://cloud.google.com/storage/docs/json_api/v1/buckets).
 
-**Note**: When importing a bucket or using only the default provider project for bucket creation, you will need to enable the Compute API and will otherwise get an error with a link to the API enablement page. If you would prefer not to enable the Compute API, make sure to explicitly set `project` on the bucket resource.
+**Note**: If the project ID is not set on the resource or in the provider block, it will be determined
+dynamically, which requires enabling the Compute API.
+
 
 ## Example Usage
 
@@ -141,9 +143,15 @@ exported:
 
 ## Import
 
-Storage buckets can be imported using the `name`, e.g.
+Storage buckets can be imported using the `name` or `project/name`. If the project is not
+passed to the import command, it will be inferred from the provider block or environment variables.
+If it cannot be inferred, it will be queried from the Compute API (this will fail if the API is
+not enabled).
+
+e.g.
 
 ```
 $ terraform import google_storage_bucket.image-store image-store-bucket
+$ terraform import google_storage_bucket.image-store tf-test-project/image-store-bucket
 ```
 
````
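
To make the inference order concrete, the sketch below walks the fallback described in the note and the import docs: a project set explicitly on the resource wins, then the provider block or environment variables, and only then the Compute API lookup by project number. `resolveProject` and its parameters are hypothetical stand-ins for `d.Get("project")`, `getProject(d, config)`, and the `config.clientCompute.Projects.Get` call in the real resource.

```go
package main

import (
	"errors"
	"fmt"
)

// resolveProject illustrates the lookup order described above. It is a sketch,
// not the provider's code: the three inputs stand in for the value set on the
// resource, the provider/environment default, and the Compute API lookup.
func resolveProject(onResource, providerOrEnv string, fromComputeAPI func() (string, error)) (string, error) {
	if onResource != "" {
		return onResource, nil // explicitly set on the resource
	}
	if providerOrEnv != "" {
		return providerOrEnv, nil // provider block or environment variable
	}
	// Last resort: derive the project ID from the project number via the
	// Compute API; this is the step that requires the API to be enabled.
	return fromComputeAPI()
}

func main() {
	// Project supplied by the provider block, so the Compute API is never called.
	project, err := resolveProject("", "my-default-project", func() (string, error) {
		return "", errors.New("compute API not enabled")
	})
	fmt.Println(project, err) // my-default-project <nil>
}
```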