feat(storage): seed buckets locally from config file (#2460)
* feat(storage): create buckets locally from config file

* feat: add command for seeding buckets

* chore: add example config for buckets
sweatybridge authored Jul 2, 2024
1 parent 08780a5 commit 576f675
Showing 11 changed files with 205 additions and 10 deletions.
41 changes: 41 additions & 0 deletions cmd/seed.go
@@ -0,0 +1,41 @@
+package cmd
+
+import (
+	"os"
+	"os/signal"
+
+	"github.com/spf13/cobra"
+	"github.com/supabase/cli/internal/seed/buckets"
+	"github.com/supabase/cli/internal/utils"
+)
+
+var (
+	seedCmd = &cobra.Command{
+		GroupID: groupLocalDev,
+		Use:     "seed",
+		Short:   "Seed a Supabase project from " + utils.ConfigPath,
+		PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
+			ctx, _ := signal.NotifyContext(cmd.Context(), os.Interrupt)
+			cmd.SetContext(ctx)
+			return cmd.Root().PersistentPreRunE(cmd, args)
+		},
+	}
+
+	bucketsCmd = &cobra.Command{
+		Use:   "buckets",
+		Short: "Seed buckets declared in [storage.buckets]",
+		Args:  cobra.NoArgs,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			return buckets.Run(cmd.Context())
+		},
+	}
+)
+
+func init() {
+	seedFlags := seedCmd.PersistentFlags()
+	seedFlags.Bool("linked", false, "Seeds the linked project.")
+	seedFlags.Bool("local", true, "Seeds the local database.")
+	seedCmd.MarkFlagsMutuallyExclusive("local", "linked")
+	seedCmd.AddCommand(bucketsCmd)
+	rootCmd.AddCommand(seedCmd)
+}
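
For reference, a minimal standalone sketch (an assumed example, not part of this commit) of how cobra enforces the mutually exclusive --local/--linked flags registered above; passing both makes Execute return an error before RunE runs:

// Sketch: mutually exclusive boolean flags with cobra.
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use:           "seed",
		SilenceUsage:  true,
		SilenceErrors: true,
		RunE: func(cmd *cobra.Command, args []string) error {
			linked, _ := cmd.Flags().GetBool("linked")
			fmt.Println("seeding, linked =", linked)
			return nil
		},
	}
	cmd.Flags().Bool("linked", false, "Seeds the linked project.")
	cmd.Flags().Bool("local", true, "Seeds the local database.")
	cmd.MarkFlagsMutuallyExclusive("local", "linked")

	// Setting both flags explicitly makes cobra fail validation before RunE executes.
	cmd.SetArgs([]string{"--local", "--linked"})
	if err := cmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, "error:", err)
	}
}
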
80 changes: 80 additions & 0 deletions internal/seed/buckets/buckets.go
@@ -0,0 +1,80 @@
+package buckets
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/supabase/cli/internal/storage/client"
+	"github.com/supabase/cli/internal/utils"
+	"github.com/supabase/cli/internal/utils/flags"
+	"github.com/supabase/cli/pkg/storage"
+)
+
+func Run(ctx context.Context) error {
+	api, err := client.NewStorageAPI(ctx, flags.ProjectRef)
+	if err != nil {
+		return err
+	}
+	buckets, err := api.ListBuckets(ctx)
+	if err != nil {
+		return err
+	}
+	var exists []string
+	console := utils.NewConsole()
+	for _, b := range buckets {
+		props := NewBucketProps(b.Name)
+		if props == nil {
+			continue
+		}
+		exists = append(exists, b.Name)
+		label := fmt.Sprintf("Bucket %s already exists. Do you want to overwrite its properties?", utils.Bold(b.Id))
+		if shouldOverwrite, err := console.PromptYesNo(ctx, label, true); err != nil {
+			return err
+		} else if !shouldOverwrite {
+			continue
+		}
+		body := storage.UpdateBucketRequest{
+			Id:          b.Id,
+			BucketProps: props,
+		}
+		if _, err := api.UpdateBucket(ctx, body); err != nil {
+			return err
+		}
+	}
+	for name := range utils.Config.Storage.Buckets {
+		if utils.SliceContains(exists, name) {
+			continue
+		}
+		msg := "Creating storage bucket:"
+		if len(flags.ProjectRef) == 0 {
+			msg = "Creating local storage bucket:"
+		}
+		fmt.Fprintln(os.Stderr, msg, name)
+		body := storage.CreateBucketRequest{
+			Name:        name,
+			BucketProps: NewBucketProps(name),
+		}
+		if _, err := api.CreateBucket(ctx, body); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func NewBucketProps(name string) *storage.BucketProps {
+	config, ok := utils.Config.Storage.Buckets[name]
+	if !ok {
+		return nil
+	}
+	props := storage.BucketProps{
+		Public:           config.Public,
+		AllowedMimeTypes: config.AllowedMimeTypes,
+	}
+	if config.FileSizeLimit > 0 {
+		props.FileSizeLimit = int(config.FileSizeLimit)
+	} else {
+		props.FileSizeLimit = int(utils.Config.Storage.FileSizeLimit)
+	}
+	return &props
+}
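
The file size fallback in NewBucketProps can be summarized with a simplified, standalone sketch (illustrative types only, not the CLI's real config structs): a bucket-level file_size_limit wins, otherwise the global [storage] limit applies.

// Sketch: per-bucket file size limit with a global fallback.
package main

import "fmt"

type bucketConfig struct {
	Public           bool
	FileSizeLimit    int64
	AllowedMimeTypes []string
}

func fileSizeLimit(cfg bucketConfig, globalLimit int64) int {
	if cfg.FileSizeLimit > 0 {
		return int(cfg.FileSizeLimit)
	}
	return int(globalLimit)
}

func main() {
	global := int64(50 * 1024 * 1024) // 50MiB from [storage].file_size_limit
	withLimit := bucketConfig{FileSizeLimit: 10 * 1024 * 1024}
	withoutLimit := bucketConfig{}

	fmt.Println(fileSizeLimit(withLimit, global))    // bucket-level limit: 10485760
	fmt.Println(fileSizeLimit(withoutLimit, global)) // falls back to global: 52428800
}
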
9 changes: 9 additions & 0 deletions internal/start/start.go
@@ -23,6 +23,7 @@ import (
 	"github.com/spf13/afero"
 	"github.com/supabase/cli/internal/db/start"
 	"github.com/supabase/cli/internal/functions/serve"
+	"github.com/supabase/cli/internal/seed/buckets"
 	"github.com/supabase/cli/internal/services"
 	"github.com/supabase/cli/internal/status"
 	"github.com/supabase/cli/internal/utils"
@@ -1022,6 +1023,14 @@ EOF
 	}

 	p.Send(utils.StatusMsg("Waiting for health checks..."))
+	if utils.NoBackupVolume && utils.SliceContains(started, utils.StorageId) {
+		if err := start.WaitForHealthyService(ctx, serviceTimeout, utils.StorageId); err != nil {
+			return err
+		}
+		if err := buckets.Run(ctx); err != nil {
+			return err
+		}
+	}
 	return start.WaitForHealthyService(ctx, serviceTimeout, started...)
 }

15 changes: 15 additions & 0 deletions internal/start/start_test.go
@@ -19,6 +19,7 @@ import (
 	"github.com/supabase/cli/internal/testing/apitest"
 	"github.com/supabase/cli/internal/testing/pgtest"
 	"github.com/supabase/cli/internal/utils"
+	"github.com/supabase/cli/pkg/storage"
 )

 func TestStartCommand(t *testing.T) {
@@ -172,6 +173,20 @@ func TestDatabaseStart(t *testing.T) {
 	gock.New("127.0.0.1").
 		Head("/functions/v1/_internal/health").
 		Reply(http.StatusOK)
+	// Seed tenant services
+	gock.New(utils.Docker.DaemonHost()).
+		Get("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.StorageId + "/json").
+		Reply(http.StatusOK).
+		JSON(types.ContainerJSON{ContainerJSONBase: &types.ContainerJSONBase{
+			State: &types.ContainerState{
+				Running: true,
+				Health:  &types.Health{Status: "healthy"},
+			},
+		}})
+	gock.New("127.0.0.1").
+		Get("/storage/v1/bucket").
+		Reply(http.StatusOK).
+		JSON([]storage.BucketResponse{})
 	// Run test
 	err := utils.RunProgram(context.Background(), func(p utils.Program, ctx context.Context) error {
 		return run(p, context.Background(), fsys, []string{}, pgconn.Config{Host: utils.DbId}, conn.Intercept)
12 changes: 8 additions & 4 deletions internal/storage/client/api.go
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"

+	"github.com/spf13/viper"
 	"github.com/supabase/cli/internal/utils"
 	"github.com/supabase/cli/internal/utils/tenant"
 	"github.com/supabase/cli/pkg/fetcher"
@@ -15,11 +16,14 @@ func NewStorageAPI(ctx context.Context, projectRef string) (storage.StorageAPI,
 	token := utils.Config.Auth.ServiceRoleKey
 	if len(projectRef) > 0 {
 		server = "https://" + utils.GetSupabaseHost(projectRef)
-		apiKey, err := tenant.GetApiKeys(ctx, projectRef)
-		if err != nil {
-			return storage.StorageAPI{}, err
+		// Special case for calling storage API without personal access token
+		if !viper.IsSet("AUTH_SERVICE_ROLE_KEY") {
+			apiKey, err := tenant.GetApiKeys(ctx, projectRef)
+			if err != nil {
+				return storage.StorageAPI{}, err
+			}
+			token = apiKey.ServiceRole
 		}
-		token = apiKey.ServiceRole
 	}
 	api := storage.StorageAPI{Fetcher: fetcher.NewFetcher(
 		server,
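
A standalone sketch of the token selection above, with a hypothetical fetchTenantKey helper standing in for tenant.GetApiKeys: a service role key supplied via config or environment takes precedence, and the tenant API is only queried when it is absent.

// Sketch: prefer a locally configured service role key over fetching tenant keys.
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func resolveToken(projectRef, localKey string, fetchTenantKey func(string) (string, error)) (string, error) {
	token := localKey
	// Only call the tenant API when no service role key was provided locally.
	if projectRef != "" && !viper.IsSet("AUTH_SERVICE_ROLE_KEY") {
		remote, err := fetchTenantKey(projectRef)
		if err != nil {
			return "", err
		}
		token = remote
	}
	return token, nil
}

func main() {
	viper.Set("AUTH_SERVICE_ROLE_KEY", "local-service-role")
	token, _ := resolveToken("abcdefgh", "local-service-role", func(string) (string, error) {
		return "remote-service-role", nil
	})
	fmt.Println(token) // local-service-role; the tenant API is skipped
}
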
7 changes: 6 additions & 1 deletion internal/storage/cp/cp.go
@@ -12,6 +12,7 @@ import (

 	"github.com/go-errors/errors"
 	"github.com/spf13/afero"
+	"github.com/supabase/cli/internal/seed/buckets"
 	"github.com/supabase/cli/internal/storage/client"
 	"github.com/supabase/cli/internal/storage/ls"
 	"github.com/supabase/cli/internal/utils"
@@ -137,7 +138,11 @@ func UploadStorageObjectAll(ctx context.Context, api storage.StorageAPI, remoteP
 	if err != nil && strings.Contains(err.Error(), `"error":"Bucket not found"`) {
 		// Retry after creating bucket
 		if bucket, prefix := client.SplitBucketPrefix(dstPath); len(prefix) > 0 {
-			if _, err := api.CreateBucket(ctx, bucket); err != nil {
+			body := storage.CreateBucketRequest{
+				Name:        bucket,
+				BucketProps: buckets.NewBucketProps(bucket),
+			}
+			if _, err := api.CreateBucket(ctx, body); err != nil {
 				return err
 			}
 			err = api.UploadObject(ctx, dstPath, filePath, fsys, opts...)
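
A simplified, self-contained sketch of the retry flow touched here, with stand-in helpers rather than the CLI's real API: when an upload fails because the bucket is missing, the bucket is created (now with any configured props) and the upload is retried once.

// Sketch: create-then-retry on "Bucket not found".
package main

import (
	"errors"
	"fmt"
	"strings"
)

var errBucketNotFound = errors.New(`{"error":"Bucket not found"}`)

func upload(bucket, path string, exists map[string]bool) error {
	if !exists[bucket] {
		return errBucketNotFound
	}
	fmt.Println("uploaded", path, "to", bucket)
	return nil
}

func main() {
	exists := map[string]bool{}
	bucket, path := "images", "logo.png"

	err := upload(bucket, path, exists)
	if err != nil && strings.Contains(err.Error(), `"error":"Bucket not found"`) {
		exists[bucket] = true // stands in for api.CreateBucket with configured props
		err = upload(bucket, path, exists)
	}
	fmt.Println("final error:", err)
}
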
7 changes: 7 additions & 0 deletions internal/utils/config.go
@@ -373,12 +373,19 @@ type (
 		FileSizeLimit       sizeInBytes          `toml:"file_size_limit"`
 		S3Credentials       storageS3Credentials `toml:"-"`
 		ImageTransformation imageTransformation  `toml:"image_transformation"`
+		Buckets             map[string]bucket    `toml:"buckets"`
 	}

 	imageTransformation struct {
 		Enabled bool `toml:"enabled"`
 	}

+	bucket struct {
+		Public           bool        `toml:"public"`
+		FileSizeLimit    sizeInBytes `toml:"file_size_limit"`
+		AllowedMimeTypes []string    `toml:"allowed_mime_types"`
+	}
+
 	storageS3Credentials struct {
 		AccessKeyId     string `toml:"-"`
 		SecretAccessKey string `toml:"-"`
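
A minimal sketch of how a [storage.buckets.&lt;name&gt;] table maps onto the new Buckets field; BurntSushi/toml and simplified structs are assumed here and may differ from the CLI's actual decoder.

// Sketch: decoding named bucket tables into a map keyed by bucket name.
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

type bucket struct {
	Public           bool     `toml:"public"`
	FileSizeLimit    string   `toml:"file_size_limit"` // the CLI parses this into a sizeInBytes type
	AllowedMimeTypes []string `toml:"allowed_mime_types"`
}

type storageConfig struct {
	Buckets map[string]bucket `toml:"buckets"`
}

func main() {
	doc := `
[buckets.images]
public = false
file_size_limit = "50MiB"
allowed_mime_types = ["image/png", "image/jpeg"]
`
	var cfg storageConfig
	if _, err := toml.Decode(doc, &cfg); err != nil {
		panic(err)
	}
	// The table name "images" becomes the map key, i.e. the bucket name.
	fmt.Printf("%+v\n", cfg.Buckets["images"])
}
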
1 change: 1 addition & 0 deletions internal/utils/misc.go
@@ -28,6 +28,7 @@ const (
 	Pg14Image = "supabase/postgres:14.1.0.89"
 	Pg15Image = "supabase/postgres:15.1.1.61"
 	// Append to ServiceImages when adding new dependencies below
+	// TODO: try https://github.com/axllent/mailpit
 	KongImage = "library/kong:2.8.1"
 	InbucketImage = "inbucket/inbucket:3.0.3"
 	PostgrestImage = "postgrest/postgrest:v12.2.0"
6 changes: 6 additions & 0 deletions internal/utils/templates/init_config.test.toml
@@ -70,6 +70,12 @@ file_size_limit = "50MiB"
 [storage.image_transformation]
 enabled = false

+# Uncomment to configure local storage buckets
+[storage.buckets.images]
+public = false
+file_size_limit = "50MiB"
+allowed_mime_types = ["image/png", "image/jpeg"]
+
 [auth]
 enabled = true
 # The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
6 changes: 6 additions & 0 deletions internal/utils/templates/init_config.toml
@@ -70,6 +70,12 @@ file_size_limit = "50MiB"
 [storage.image_transformation]
 enabled = true

+# Uncomment to configure local storage buckets
+# [storage.buckets.images]
+# public = false
+# file_size_limit = "50MiB"
+# allowed_mime_types = ["image/png", "image/jpeg"]
+
 [auth]
 enabled = true
 # The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
31 changes: 26 additions & 5 deletions pkg/storage/buckets.go
@@ -27,20 +27,23 @@ func (s *StorageAPI) ListBuckets(ctx context.Context) ([]BucketResponse, error)
 	return fetcher.ParseJSON[[]BucketResponse](resp.Body)
 }

-type CreateBucketRequest struct {
-	Name string `json:"name"` // "string",
-	Id   string `json:"id,omitempty"` // "string",
+type BucketProps struct {
 	Public           bool     `json:"public,omitempty"`             // false,
 	FileSizeLimit    int      `json:"file_size_limit,omitempty"`    // 0,
 	AllowedMimeTypes []string `json:"allowed_mime_types,omitempty"` // ["string"]
 }

+type CreateBucketRequest struct {
+	Name string `json:"name"` // "string",
+	Id   string `json:"id,omitempty"` // "string",
+	*BucketProps
+}
+
 type CreateBucketResponse struct {
 	Name string `json:"name"`
 }

-func (s *StorageAPI) CreateBucket(ctx context.Context, bucketName string) (CreateBucketResponse, error) {
-	body := CreateBucketRequest{Name: bucketName}
+func (s *StorageAPI) CreateBucket(ctx context.Context, body CreateBucketRequest) (CreateBucketResponse, error) {
 	resp, err := s.Send(ctx, http.MethodPost, "/storage/v1/bucket", body)
 	if err != nil {
 		return CreateBucketResponse{}, err
@@ -49,6 +52,24 @@ func (s *StorageAPI) CreateBucket(ctx context.Context, bucketName string) (Creat
 	return fetcher.ParseJSON[CreateBucketResponse](resp.Body)
 }

+type UpdateBucketRequest struct {
+	Id string `json:"id"`
+	*BucketProps
+}
+
+type UpdateBucketResponse struct {
+	Message string `json:"message"`
+}
+
+func (s *StorageAPI) UpdateBucket(ctx context.Context, body UpdateBucketRequest) (UpdateBucketResponse, error) {
+	resp, err := s.Send(ctx, http.MethodPut, "/storage/v1/bucket/"+body.Id, body.BucketProps)
+	if err != nil {
+		return UpdateBucketResponse{}, err
+	}
+	defer resp.Body.Close()
+	return fetcher.ParseJSON[UpdateBucketResponse](resp.Body)
+}
+
 type DeleteBucketResponse struct {
 	Message string `json:"message"`
 }
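
Why CreateBucketRequest and UpdateBucketRequest embed *BucketProps: encoding/json promotes the embedded fields and serializes them flat alongside name/id in the request payload. A small standalone sketch (not part of this commit) illustrates the effect:

// Sketch: embedded struct pointer fields are flattened by encoding/json.
package main

import (
	"encoding/json"
	"fmt"
)

type BucketProps struct {
	Public           bool     `json:"public,omitempty"`
	FileSizeLimit    int      `json:"file_size_limit,omitempty"`
	AllowedMimeTypes []string `json:"allowed_mime_types,omitempty"`
}

type CreateBucketRequest struct {
	Name string `json:"name"`
	Id   string `json:"id,omitempty"`
	*BucketProps
}

func main() {
	body := CreateBucketRequest{
		Name: "images",
		BucketProps: &BucketProps{
			Public:        true,
			FileSizeLimit: 52428800,
		},
	}
	out, _ := json.Marshal(body)
	fmt.Println(string(out))
	// {"name":"images","public":true,"file_size_limit":52428800}
}
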
