Skip to content

Commit 227a4e2

Browse files
feat(vertexai): add the data source for Vertex AI Index (#6940) (#14640)
Signed-off-by: Modular Magician <[email protected]>
1 parent 0d78102 commit 227a4e2

File tree

5 files changed

+154
-0
lines changed

5 files changed

+154
-0
lines changed

.changelog/6940.txt

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:new-datasource
2+
google_vertex_ai_index
3+
```

google/data_source_vertex_ai_index.go

+33
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
package google
2+
3+
import (
4+
"fmt"
5+
6+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
7+
"github.com/hashicorp/terraform-provider-google/google/tpgresource"
8+
transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
9+
)
10+
11+
func dataSourceVertexAIIndex() *schema.Resource {
12+
13+
dsSchema := datasourceSchemaFromResourceSchema(ResourceVertexAIIndex().Schema)
14+
15+
addRequiredFieldsToSchema(dsSchema, "name", "region")
16+
addOptionalFieldsToSchema(dsSchema, "project")
17+
18+
return &schema.Resource{
19+
Read: dataSourceVertexAIIndexRead,
20+
Schema: dsSchema,
21+
}
22+
}
23+
24+
func dataSourceVertexAIIndexRead(d *schema.ResourceData, meta interface{}) error {
25+
config := meta.(*transport_tpg.Config)
26+
27+
id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{region}}/indexes/{{name}}")
28+
if err != nil {
29+
return fmt.Errorf("Error constructing id: %s", err)
30+
}
31+
d.SetId(id)
32+
return resourceVertexAIIndexRead(d, meta)
33+
}
+91
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
package google
2+
3+
import (
4+
"testing"
5+
6+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
7+
"github.com/hashicorp/terraform-provider-google/google/acctest"
8+
)
9+
10+
func TestAccDataSourceVertexAIIndex_basic(t *testing.T) {
11+
t.Parallel()
12+
13+
context := map[string]interface{}{
14+
"project": acctest.GetTestProjectFromEnv(),
15+
"random_suffix": RandString(t, 10),
16+
}
17+
18+
VcrTest(t, resource.TestCase{
19+
PreCheck: func() { acctest.AccTestPreCheck(t) },
20+
ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
21+
CheckDestroy: testAccCheckVertexAIIndexDestroyProducer_basic(t),
22+
Steps: []resource.TestStep{
23+
{
24+
Config: testAccDataSourceVertexAIIndex_basic(context),
25+
Check: resource.ComposeTestCheckFunc(
26+
acctest.CheckDataSourceStateMatchesResourceStateWithIgnores(
27+
"data.google_vertex_ai_index.foo",
28+
"google_vertex_ai_index.index",
29+
// The projects.locations.indexes.get doesn't return the following fields
30+
map[string]struct{}{
31+
"metadata.0.contents_delta_uri": {},
32+
"metadata.0.is_complete_overwrite": {},
33+
},
34+
),
35+
),
36+
},
37+
},
38+
})
39+
}
40+
41+
// testAccDataSourceVertexAIIndex_basic renders the acceptance-test
// configuration: a GCS bucket seeded with sample embedding records, a
// BATCH_UPDATE google_vertex_ai_index built from those contents, and a
// google_vertex_ai_index data source reading that index back. The
// %{...} placeholders are interpolated from context by Nprintf; the
// ${...} expressions are Terraform interpolation resolved at apply time.
func testAccDataSourceVertexAIIndex_basic(context map[string]interface{}) string {
	return Nprintf(`
resource "google_storage_bucket" "bucket" {
  name                        = "%{project}-tf-test-vertex-ai-index-test%{random_suffix}" # Every bucket name must be globally unique
  location                    = "us-central1"
  uniform_bucket_level_access = true
}

# The sample data comes from the following link:
# https://cloud.google.com/vertex-ai/docs/matching-engine/filtering#specify-namespaces-tokens
resource "google_storage_bucket_object" "data" {
  name    = "contents/data.json"
  bucket  = google_storage_bucket.bucket.name
  content = <<EOF
{"id": "42", "embedding": [0.5, 1.0], "restricts": [{"namespace": "class", "allow": ["cat", "pet"]},{"namespace": "category", "allow": ["feline"]}]}
{"id": "43", "embedding": [0.6, 1.0], "restricts": [{"namespace": "class", "allow": ["dog", "pet"]},{"namespace": "category", "allow": ["canine"]}]}
EOF
}

resource "google_vertex_ai_index" "index" {
  labels = {
    foo = "bar"
  }
  region       = "us-central1"
  display_name = "tf-test-test-index%{random_suffix}"
  description  = "index for test"
  metadata {
    contents_delta_uri = "gs://${google_storage_bucket.bucket.name}/contents"
    config {
      dimensions                  = 2
      approximate_neighbors_count = 150
      distance_measure_type       = "DOT_PRODUCT_DISTANCE"
      algorithm_config {
        tree_ah_config {
          leaf_node_embedding_count    = 500
          leaf_nodes_to_search_percent = 7
        }
      }
    }
  }
  index_update_method = "BATCH_UPDATE"
}

data "google_vertex_ai_index" "foo" {
  name    = google_vertex_ai_index.index.name
  region  = google_vertex_ai_index.index.region
  project = google_vertex_ai_index.index.project
}

`, context)
}

google/provider.go

+1
Original file line numberDiff line numberDiff line change
@@ -711,6 +711,7 @@ func Provider() *schema.Provider {
711711
"google_tpu_tensorflow_versions": DataSourceTpuTensorflowVersions(),
712712
"google_vpc_access_connector": DataSourceVPCAccessConnector(),
713713
"google_redis_instance": DataSourceGoogleRedisInstance(),
714+
"google_vertex_ai_index": dataSourceVertexAIIndex(),
714715
// ####### END datasources ###########
715716
},
716717
ResourcesMap: ResourceMap(),
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
---
2+
subcategory: "VertexAI"
3+
description: |-
4+
A representation of a collection of database items organized in a way that allows for approximate nearest neighbor (a.k.a. ANN) algorithm search.
5+
---
6+
7+
# google\_vertex\_ai\_index
8+
9+
A representation of a collection of database items organized in a way that allows for approximate nearest neighbor (a.k.a. ANN) algorithm search.
10+
11+
12+
## Argument Reference
13+
14+
The following arguments are supported:
15+
16+
* `name` - (Required) The name of the index.
17+
18+
* `region` - (Required) The region of the index.
19+
20+
- - -
21+
22+
* `project` - (Optional) The ID of the project in which the resource belongs.
23+
24+
## Attributes Reference
25+
26+
See [google_vertex_ai_index](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/vertex_ai_index) resource for details of the available attributes.

0 commit comments

Comments
 (0)