feat: databases sdk v2 #1840

Merged (15 commits) on Jun 2, 2023
7 changes: 7 additions & 0 deletions docs/data-sources/databases.md
@@ -19,6 +19,13 @@ data "snowflake_databases" "this" {}
<!-- schema generated by tfplugindocs -->
## Schema

### Optional

- `history` (Boolean) Optionally includes dropped databases that have not yet been purged. The output also includes an additional `dropped_on` column
- `pattern` (String) Optionally filters the databases by a SQL `LIKE` pattern
- `starts_with` (String) Optionally filters the databases by the characters their names start with
- `terse` (Boolean) Optionally returns only the columns `created_on` and `name` in the results

### Read-Only

- `databases` (List of Object) Snowflake databases (see [below for nested schema](#nestedatt--databases))
94 changes: 59 additions & 35 deletions pkg/datasources/databases.go
@@ -1,16 +1,36 @@
package datasources

import (
"context"
"database/sql"
"log"
"strconv"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake"
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/jmoiron/sqlx"
)

var databasesSchema = map[string]*schema.Schema{
"terse": {
Type: schema.TypeBool,
Optional: true,
Default: false,
Description: "Optionally returns only the columns `created_on` and `name` in the results",
},
"history": {
Type: schema.TypeBool,
Optional: true,
Default: false,
Description: "Optionally includes dropped databases that have not yet been purged The output also includes an additional `dropped_on` column",
},
"pattern": {
Type: schema.TypeString,
Optional: true,
Description: "Optionally filters the databases by a pattern",
},
"starts_with": {
Type: schema.TypeString,
Optional: true,
Description: "Optionally filters the databases by a pattern",
},
"databases": {
Type: schema.TypeList,
Computed: true,
@@ -86,41 +106,45 @@ func Databases() *schema.Resource {
// ReadDatabases reads the databases visible in the current Snowflake account.
func ReadDatabases(d *schema.ResourceData, meta interface{}) error {
db := meta.(*sql.DB)
dbx := sqlx.NewDb(db, "snowflake")
dbs, err := snowflake.ListDatabases(dbx)
client := sdk.NewClientFromDB(db)
ctx := context.Background()
opts := sdk.ShowDatabasesOptions{}
if terse, ok := d.GetOk("terse"); ok {
opts.Terse = sdk.Bool(terse.(bool))
}
if history, ok := d.GetOk("history"); ok {
opts.History = sdk.Bool(history.(bool))
}
if pattern, ok := d.GetOk("pattern"); ok {
opts.Like = &sdk.Like{
Pattern: sdk.String(pattern.(string)),
}
}
if startsWith, ok := d.GetOk("starts_with"); ok {
opts.StartsWith = sdk.String(startsWith.(string))
}
databases, err := client.Databases.Show(ctx, &opts)
if err != nil {
log.Println("[DEBUG] list databases failed to decode")
d.SetId("")
return nil
return err
}
log.Printf("[DEBUG] list databases: %v", dbs)
d.SetId("databases_read")
databases := []map[string]interface{}{}
for _, db := range dbs {
dbR := map[string]interface{}{}
if !db.DBName.Valid {
continue
}
dbR["name"] = db.DBName.String
dbR["comment"] = db.Comment.String
dbR["owner"] = db.Owner.String
dbR["is_default"] = db.IsDefault.String == "Y"
dbR["is_current"] = db.IsCurrent.String == "Y"
dbR["origin"] = db.Origin.String
dbR["created_on"] = db.CreatedOn.String
dbR["options"] = db.Options.String
dbR["retention_time"] = -1
if db.RetentionTime.Valid {
v, err := strconv.Atoi(db.RetentionTime.String)
if err == nil {
dbR["retention_time"] = v
}
}
databases = append(databases, dbR)
flattenedDatabases := []map[string]interface{}{}
for _, database := range databases {
flattenedDatabase := map[string]interface{}{}
flattenedDatabase["name"] = database.Name
flattenedDatabase["comment"] = database.Comment
flattenedDatabase["owner"] = database.Owner
flattenedDatabase["is_default"] = database.IsDefault
flattenedDatabase["is_current"] = database.IsCurrent
flattenedDatabase["origin"] = database.Origin
flattenedDatabase["created_on"] = database.CreatedOn.String()
flattenedDatabase["options"] = database.Options
flattenedDatabase["retention_time"] = database.RetentionTime
flattenedDatabases = append(flattenedDatabases, flattenedDatabase)
}
databasesErr := d.Set("databases", databases)
if databasesErr != nil {
return databasesErr
err = d.Set("databases", flattenedDatabases)
if err != nil {
return err
}
return nil
}
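
For readers new to the SDK surface this PR introduces, here is a minimal standalone sketch of the flow the data source now follows: map the optional arguments onto `sdk.ShowDatabasesOptions` and call `client.Databases.Show`. Only the `sdk` calls mirror the diff above; the `listDatabases` helper, the driver import, and the DSN placeholder are illustrative assumptions.

```go
package main

import (
    "context"
    "database/sql"
    "fmt"
    "log"

    "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
    _ "github.com/snowflakedb/gosnowflake" // registers the "snowflake" driver (assumption: standard Go driver)
)

// listDatabases mirrors how ReadDatabases above maps data-source arguments onto
// sdk.ShowDatabasesOptions; the helper itself is illustrative, not part of the PR.
func listDatabases(db *sql.DB, pattern string, history bool) error {
    client := sdk.NewClientFromDB(db)
    ctx := context.Background()

    opts := sdk.ShowDatabasesOptions{
        History: sdk.Bool(history), // include dropped-but-not-purged databases
    }
    if pattern != "" {
        // equivalent to SHOW DATABASES LIKE '<pattern>'
        opts.Like = &sdk.Like{Pattern: sdk.String(pattern)}
    }

    databases, err := client.Databases.Show(ctx, &opts)
    if err != nil {
        return err
    }
    for _, database := range databases {
        fmt.Printf("%s (owner: %s, retention: %d days)\n", database.Name, database.Owner, database.RetentionTime)
    }
    return nil
}

func main() {
    // The DSN is a placeholder; supply real credentials to run this.
    db, err := sql.Open("snowflake", "<account>/<database>")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()
    if err := listDatabases(db, "PROD_%", false); err != nil {
        log.Fatal(err)
    }
}
```
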
38 changes: 10 additions & 28 deletions pkg/resources/database.go
@@ -1,16 +1,15 @@
package resources

import (
"context"
"database/sql"
"errors"
"fmt"
"log"
"strconv"
"strings"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/jmoiron/sqlx"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake"
)

@@ -219,40 +218,23 @@ func createDatabaseFromReplica(d *schema.ResourceData, meta interface{}) error {

func ReadDatabase(d *schema.ResourceData, meta interface{}) error {
db := meta.(*sql.DB)
dbx := sqlx.NewDb(db, "snowflake")
client := sdk.NewClientFromDB(db)
ctx := context.Background()
name := d.Id()
id := sdk.NewAccountObjectIdentifier(name)

// perform a "show database" command to ensure that the database is actually there.
stmt := snowflake.NewDatabaseBuilder(name).Show()
row := snowflake.QueryRow(db, stmt)
_, err := snowflake.ScanDatabase(row)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
// If not found, mark resource to be removed from statefile during apply or refresh
log.Printf("[DEBUG] database (%s) not found", name)
d.SetId("")
return nil
}
return fmt.Errorf("unable to scan row for SHOW DATABASES")
}

// there may be more than one database found, so we need to filter. this could probably be combined with the above query
database, err := snowflake.ListDatabase(dbx, name)
database, err := client.Databases.ShowByID(ctx, id)
if err != nil {
return err
}
if err := d.Set("name", database.DBName.String); err != nil {
if err := d.Set("name", database.Name); err != nil {
return err
}
if err := d.Set("comment", database.Comment.String); err != nil {
if err := d.Set("comment", database.Comment); err != nil {
return err
}

i, err := strconv.ParseInt(database.RetentionTime.String, 10, 64)
if err != nil {
return err
}
if err := d.Set("data_retention_time_in_days", i); err != nil {
if err := d.Set("data_retention_time_in_days", database.RetentionTime); err != nil {
return err
}

@@ -261,7 +243,7 @@ func ReadDatabase(d *schema.ResourceData, meta interface{}) error {
return err
}

if opts := database.Options.String; opts != "" {
if opts := database.Options; opts != "" {
for _, opt := range strings.Split(opts, ", ") {
if opt == "TRANSIENT" {
if err := d.Set("is_transient", true); err != nil {
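
The resource read takes the same route but resolves a single database by identifier. A condensed sketch under the same assumptions as above: `inspectDatabase` and its return shape are invented for illustration, while the `ShowByID` lookup and the `TRANSIENT` option parsing mirror `ReadDatabase` in the diff.

```go
package sketch

import (
    "context"
    "database/sql"
    "strings"

    "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

// inspectDatabase condenses the new ReadDatabase path: resolve the database by
// identifier through the SDK, then derive the transient flag from the
// comma-separated Options string returned by SHOW DATABASES.
func inspectDatabase(db *sql.DB, name string) (isTransient bool, retentionDays int, err error) {
    client := sdk.NewClientFromDB(db)
    ctx := context.Background()

    id := sdk.NewAccountObjectIdentifier(name)
    database, err := client.Databases.ShowByID(ctx, id)
    if err != nil {
        return false, 0, err
    }

    for _, opt := range strings.Split(database.Options, ", ") {
        if opt == "TRANSIENT" {
            isTransient = true
        }
    }
    return isTransient, database.RetentionTime, nil
}
```
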
37 changes: 37 additions & 0 deletions pkg/sdk/common_types.go
@@ -1,5 +1,42 @@
package sdk

import (
"errors"
"time"
)

type TimeTravel struct {
Timestamp *time.Time `ddl:"parameter,single_quotes,arrow_equals" sql:"TIMESTAMP"`
Offset *int `ddl:"parameter,single_quotes,arrow_equals" sql:"OFFSET"`
Statement *string `ddl:"parameter,single_quotes,arrow_equals" sql:"STATEMENT"`
}

func (v *TimeTravel) validate() error {
if !exactlyOneValueSet(v.Timestamp, v.Offset, v.Statement) {
return errors.New("exactly one of TIMESTAMP, OFFSET or STATEMENT can be set")
}
return nil
}

type Clone struct {
SourceObject ObjectIdentifier
At *TimeTravel `ddl:"list,no_comma" sql:"AT"`
Before *TimeTravel `ddl:"list,no_comma" sql:"BEFORE"`
}

func (v *Clone) validate() error {
if everyValueSet(v.At, v.Before) {
return errors.New("only one of AT or BEFORE can be set")
}
if valueSet(v.At) {
return v.At.validate()
}
if valueSet(v.Before) {
return v.Before.validate()
}
return nil
}

type LimitFrom struct {
Rows *int `ddl:"keyword"`
From *string `ddl:"parameter,no_equals,single_quotes" sql:"FROM"`
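
To make the validation rules in `pkg/sdk/common_types.go` concrete: a `Clone` may carry `At` or `Before` but not both, and the chosen `TimeTravel` must set exactly one of `Timestamp`, `Offset`, or `Statement`. The sketch below is assumed to live inside the `sdk` package (it calls the unexported `validate` methods); the function name and the sample query ID are invented.

```go
package sdk

import "fmt"

// cloneValidationSketch walks through Clone.validate and TimeTravel.validate;
// it is illustrative only and not part of this PR.
func cloneValidationSketch() {
    offset := -3600 // travel back one hour (offset is a negative number of seconds)

    clone := Clone{
        SourceObject: NewAccountObjectIdentifier("PROD_DB"),
        At:           &TimeTravel{Offset: &offset},
    }
    fmt.Println(clone.validate()) // <nil>: exactly one of Timestamp/Offset/Statement is set

    // Adding Before as well violates the "AT or BEFORE, not both" rule.
    clone.Before = &TimeTravel{Statement: String("01a2b3c4-0000-0000-0000-000000000000")}
    fmt.Println(clone.validate()) // error: only one of AT or BEFORE can be set
}
```
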