Add support for CephFS and Minio (#191)
* Adding a custom S3 provider

Fixes #178

* Adding documentation updates

* Adding a few more checks for Org context switching
safaci2000 authored Sep 5, 2023
1 parent 761a8cb commit ff77357
Showing 19 changed files with 443 additions and 182 deletions.
3 changes: 1 addition & 2 deletions cmd/backup/library.go
@@ -67,9 +67,8 @@ var downloadLibary = &cobra.Command{
Long: `Download all library from grafana to local file system`,
Aliases: []string{"d"},
Run: func(command *cobra.Command, args []string) {
log.Info("exporting lib elements")
log.Infof("Downloading library for context: '%s'", config.Config().AppConfig.GetContext())
savedFiles := cmd.GetGrafanaSvc().DownloadLibraryElements(nil)
log.Infof("Importing library for context: '%s'", config.Config().AppConfig.GetContext())
cmd.TableObj.AppendHeader(table.Row{"type", "filename"})
for _, file := range savedFiles {
cmd.TableObj.AppendRow(table.Row{"library", file})
2 changes: 1 addition & 1 deletion cmd/tools/organizations.go
@@ -68,7 +68,7 @@ var getTokenOrgCmd = &cobra.Command{
Long: `display org associated with token`,
Run: func(command *cobra.Command, args []string) {

log.Infof("Listing organizations for context: '%s'", config.Config().AppConfig.GetContext())
log.Infof("Display token organization for context: '%s'", config.Config().AppConfig.GetContext())
cmd.TableObj.AppendHeader(table.Row{"id", "name"})
org := cmd.GetGrafanaSvc().GetTokenOrganization()
if org == nil {
17 changes: 15 additions & 2 deletions config/importer-example.yml
@@ -4,9 +4,22 @@ storage_engine:
any_label:
kind: cloud
cloud_type: s3
access_key_id: ""
secret_key: ""
bucket_name: ""
## The configuration below is mainly intended for OSS alternatives like Ceph and Minio. If you use a known cloud provider
## such as AWS, GCS, or Azure, please set up auth using the provider's own tooling.
## For example, a valid AWS profile configured in ~/.aws/credentials is sufficient; no auth needs to be provided in this config.
## Valid boolean values can be represented as true, "true", or "1".
custom_cloud:
custom: true ## Required. If not set to true, most of the 'custom' configuration below will be disregarded.
kind: cloud
cloud_type: s3
prefix: dummy
bucket_name: "mybucket"
access_id: "" ## can also be read from the AWS_ACCESS_KEY environment variable; the config file takes precedence
secret_key: "" ## can also be read from the AWS_SECRET_KEY environment variable; the config file takes precedence
init_bucket: "true" ## Only supported for custom workflows. Will attempt to create a bucket if one does not exist.
endpoint: "http://localhost:9000"
ssl_enabled: "false"

contexts:
testing:
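
For a known cloud provider (the non-custom path above), gdg opens the bucket through gocloud.dev using a URL built from cloud_type and bucket_name, so the standard AWS credential chain applies. A minimal standalone sketch of that path, assuming a bucket named "mybucket" and credentials already present in ~/.aws/credentials:

package main

import (
	"context"
	"log"

	"gocloud.dev/blob"
	_ "gocloud.dev/blob/s3blob" // registers the s3:// URL scheme
)

func main() {
	ctx := context.Background()
	// Credentials and region come from the standard AWS chain
	// (~/.aws/credentials, AWS_REGION, etc.); append ?region=us-east-1 to the URL to override.
	bucket, err := blob.OpenBucket(ctx, "s3://mybucket")
	if err != nil {
		log.Fatalf("failed to open bucket: %v", err)
	}
	defer bucket.Close()

	if err := bucket.WriteAll(ctx, "hello.txt", []byte("hello"), nil); err != nil {
		log.Fatalf("write failed: %v", err)
	}
}
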
3 changes: 3 additions & 0 deletions config/testing.yml
@@ -96,7 +96,10 @@ global:

storage_engine:
test:
self_hosted: true
kind: cloud
cloud_type: s3
ssl_enabled: true
bucket_name: ""
endpoint: http://localhost:9000
prefix: ""
6 changes: 3 additions & 3 deletions internal/service/contract.go
@@ -27,8 +27,8 @@ type GrafanaService interface {
}

var (
instance *DashNGoImpl
once sync.Once
instance *DashNGoImpl
initServiceOnce sync.Once
)

type DashNGoImpl struct {
@@ -42,7 +42,7 @@ type DashNGoImpl struct {
}

func NewDashNGoImpl() *DashNGoImpl {
once.Do(func() {
initServiceOnce.Do(func() {
instance = newInstance()
})
return instance
2 changes: 1 addition & 1 deletion internal/service/dashboards.go
@@ -194,7 +194,7 @@ func (s *DashNGoImpl) DownloadDashboards(filter filters.Filter) []string {
)

boardLinks = s.ListDashboards(filter)
var boards = make([]string, 0)
var boards []string
for _, link := range boardLinks {
dp := dashboards.NewGetDashboardByUIDParams()
dp.UID = link.UID
21 changes: 13 additions & 8 deletions internal/service/organizations.go
@@ -76,17 +76,22 @@ func (s *DashNGoImpl) getOrganization(id int64) (*models.OrgDetailsDTO, error) {
func (s *DashNGoImpl) SetOrganization(id int64) error {
//Removes Org filter
if id <= 1 {
log.Warnf("organization is not a valid value, resetting to default value of 1.")
s.grafanaConf.OrganizationId = 1
}

if s.grafanaConf.IsAdminEnabled() || s.grafanaConf.IsBasicAuth() {
organization, err := s.getOrganization(id)
if err != nil {
return errors.New("invalid org Id, org is not found")
}
s.grafanaConf.OrganizationId = organization.ID
} else {
if s.grafanaConf.IsAdminEnabled() || s.grafanaConf.IsBasicAuth() {
organization, err := s.getOrganization(id)
if err != nil {
return errors.New("invalid org Id, org is not found")
}
s.grafanaConf.OrganizationId = organization.ID
} else {
s.grafanaConf.OrganizationId = id
tokenOrg := s.GetTokenOrganization()
if tokenOrg.ID != id {
log.Fatalf("you have no BasicAuth configured, and token org are non-changeable. Please configure a different token associated with Org %d, OR configure basic auth.", id)
}
s.grafanaConf.OrganizationId = id
}

return config.Config().SaveToDisk(false)
104 changes: 88 additions & 16 deletions internal/service/storage_cloud.go
@@ -4,11 +4,18 @@ import (
"context"
"errors"
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
log "github.com/sirupsen/logrus"
"gocloud.dev/blob"
"gocloud.dev/blob/s3blob"
"os"
"path"
"path/filepath"
"strings"
"sync"
)

type CloudStorage struct {
@@ -23,6 +30,20 @@ const (
BucketName = "bucket_name"
Prefix = "prefix"
Kind = "kind"
Custom = "custom"
AccessId = "access_id"
SecretKey = "secret_key"
Endpoint = "endpoint"
Region = "region"
SSLEnabled = "ssl_enabled"
InitBucket = "init_bucket"
)

var (
stringEmpty = func(key string) bool {
return key == ""
}
initBucketOnce sync.Once
)

// getCloudLocation appends prefix to path
@@ -81,7 +102,11 @@ func (s *CloudStorage) FindAllFiles(folder string, fullPath bool) ([]string, err
break
}
if fullPath {
fileList = append(fileList, obj.Key)
if strings.Contains(obj.Key, folderName) {
fileList = append(fileList, obj.Key)
} else {
log.Debugf("%s does not match folder path", obj.Key)
}
} else {
fileList = append(fileList, filepath.Base(obj.Key))
}
@@ -92,7 +117,9 @@ func (s *CloudStorage) FindAllFiles(folder string, fullPath bool) ([]string, err

func NewCloudStorage(c context.Context) (Storage, error) {
var (
err error
err error
bucketObj *blob.Bucket
errorMsg string
)

contextVal := c.Value(StorageContext)
@@ -104,30 +131,75 @@ func NewCloudStorage(c context.Context) (Storage, error) {
return nil, errors.New("cannot convert appData to string map")
}

var cloudURL = fmt.Sprintf("%s://%s", appData["cloud_type"], appData["bucket_name"])
//Pattern specifically for Self hosted S3 compatible instances Minio / Ceph
if boolStrCheck(getMapValue(Custom, "false", stringEmpty, appData)) {
var sess *session.Session
creds := credentials.NewStaticCredentials(
getMapValue(AccessId, os.Getenv("AWS_ACCESS_KEY"), stringEmpty, appData),
getMapValue(SecretKey, os.Getenv("AWS_SECRET_KEY"), stringEmpty, appData), "")
sess, err = session.NewSession(&aws.Config{
Credentials: creds,
Endpoint: aws.String(getMapValue(Endpoint, "http://localhost:9000", stringEmpty, appData)),
DisableSSL: aws.Bool(getMapValue(SSLEnabled, "false", stringEmpty, appData) != "true"),
S3ForcePathStyle: aws.Bool(true),
Region: aws.String(getMapValue(Region, "us-east-1", stringEmpty, appData)),
})
bucketObj, err = s3blob.OpenBucket(context.Background(), sess, appData["bucket_name"], nil)
if err != nil {
errorMsg = err.Error()
}
if err == nil && boolStrCheck(getMapValue(InitBucket, "false", stringEmpty, appData)) {
//Attempts to initiate bucket
initBucketOnce.Do(func() {
client := s3.New(sess)
m := s3.CreateBucketInput{
Bucket: aws.String(appData[BucketName]),
}
//attempt to create bucket
_, err := client.CreateBucket(&m)
if err != nil {
log.Warnf("%s bucket already exists or cannot be created", *m.Bucket)
} else {
log.Infof("bucket %s has been created", *m.Bucket)
}
})

bucketObj, err := blob.OpenBucket(c, cloudURL)
if err != nil {
log.Panicf("failed to open bucket %s", cloudURL)
}
}

config := map[string]string{}
} else {
var cloudURL = fmt.Sprintf("%s://%s", appData["cloud_type"], appData["bucket_name"])
bucketObj, err = blob.OpenBucket(c, cloudURL)
errorMsg = fmt.Sprintf("failed to open bucket %s", cloudURL)
}

for key, value := range appData {
stringVal := fmt.Sprintf("%v", value)
if stringVal == "<nil>" {
stringVal = ""
}
config[key] = stringVal
if err != nil {
log.WithError(err).WithField("Msg", errorMsg).Fatal("unable to connect to cloud provider")
}

entity := &CloudStorage{
BucketName: config[BucketName],
BucketName: appData[BucketName],
BucketRef: bucketObj,
}
if val, ok := config["prefix"]; ok {

if val, ok := appData[Prefix]; ok {
entity.Prefix = val
}

return entity, nil
}

// boolStrCheck does a more intelligent bool check as yaml values are converted to "1" or "true" depending
// on how the user configures quotes the value.
func boolStrCheck(val string) bool {
return strings.ToLower(val) == "true" || val == "1"

}

// getMapValue a generic utility that will get a value from a map and return a default if key does not exist
func getMapValue[T comparable](key, defaultValue T, emptyTest func(key T) bool, data map[T]T) T {
val, ok := data[key]
if ok && !emptyTest(val) {
return val
}
return defaultValue
}
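
Taken together, boolStrCheck and getMapValue implement the precedence rule described in importer-example.yml: a non-empty config value wins, otherwise the supplied default (typically an environment variable) is used. A short illustrative sketch, assuming it runs inside this package (so the constants, stringEmpty, log, and os are in scope) with a hypothetical appData map:

appData := map[string]string{
	AccessId:   "",  // empty: falls back to the AWS_ACCESS_KEY environment variable
	SSLEnabled: "1", // "1" and "true" are both treated as true
}

accessID := getMapValue(AccessId, os.Getenv("AWS_ACCESS_KEY"), stringEmpty, appData)
useSSL := boolStrCheck(getMapValue(SSLEnabled, "false", stringEmpty, appData))
log.Infof("accessID=%s sslEnabled=%v", accessID, useSSL) // accessID comes from the env var; useSSL is true
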
11 changes: 6 additions & 5 deletions internal/service/storage_local.go
@@ -13,6 +13,7 @@ import (

// LocalStorage default storage engine
type LocalStorage struct {
ctx context.Context
}

// ReadFile returns a byte array of file content
@@ -22,7 +23,7 @@ func (s *LocalStorage) ReadFile(filename string) ([]byte, error) {
return nil, err
}
f := filepath.Base(filename)
data, err := mb.ReadAll(context.Background(), f)
data, err := mb.ReadAll(s.ctx, f)
if err != nil || len(data) == 0 {
return nil, errors.New("unable to read file")
}
@@ -43,7 +44,7 @@ func (s *LocalStorage) WriteFile(filename string, data []byte) error {
return err
}
f := filepath.Base(filename)
err = mb.WriteAll(context.Background(), f, data, nil)
err = mb.WriteAll(s.ctx, f, data, nil)
if err == nil {
//Remove attribute file being generated by local storage
attrFile := filename + ".attrs"
@@ -54,7 +55,7 @@ func (s *LocalStorage) WriteFile(filename string, data []byte) error {
return err
}

func (LocalStorage) Name() string {
func (s *LocalStorage) Name() string {
return "LocalStorage"
}

@@ -67,7 +68,7 @@ func (s *LocalStorage) FindAllFiles(folder string, fullPath bool) ([]string, err
var fileList []string
iterator := mb.List(nil)
for {
obj, err := iterator.Next(context.Background())
obj, err := iterator.Next(s.ctx)
if err != nil {
break
}
Expand All @@ -82,5 +83,5 @@ func (s *LocalStorage) FindAllFiles(folder string, fullPath bool) ([]string, err
}

func NewLocalStorage(ctx context.Context) Storage {
return &LocalStorage{}
return &LocalStorage{ctx: ctx}
}
4 changes: 2 additions & 2 deletions main.go
@@ -6,8 +6,8 @@ import (
"sync"

"github.com/esnet/gdg/cmd"
_ "github.com/esnet/gdg/cmd/backup" // register backup command
_ "github.com/esnet/gdg/cmd/tools" // register tools command
_ "github.com/esnet/gdg/cmd/backup" // register backup command
_ "github.com/esnet/gdg/cmd/tools" // register tools command
)

//go:embed config/importer-example.yml
Expand Down
4 changes: 2 additions & 2 deletions test/cloud_integration_test.go
@@ -24,7 +24,7 @@ func TestCloudDataSourceCRUD(t *testing.T) {
apiClient.UploadConnections(dsFilter)
dsList := apiClient.ListConnections(dsFilter)
assert.True(t, len(dsList) > 0)
SetupCloudFunction(t, cfg, apiClient, []string{"minio", "testing"})
SetupCloudFunction(t, cfg, apiClient, []string{"s3", "testing"})
//SetupCloudFunction(apiClient, []string{"mem", "testing"})

log.Info("Importing DataSources")
@@ -60,7 +60,7 @@ func TestDashboardCloudCRUD(t *testing.T) {
assert.True(t, len(boards) > 0)

//SetupCloudFunction(apiClient, []string{"mem", "testing"})
_, apiClient = SetupCloudFunction(t, cfg, apiClient, []string{"minio", "testing"})
_, apiClient = SetupCloudFunction(t, cfg, apiClient, []string{"s3", "testing"})

//At this point all operations are reading/writing from Minio
log.Info("Importing Dashboards")
9 changes: 7 additions & 2 deletions test/integration_common.go
@@ -39,14 +39,19 @@ func initTest(t *testing.T, cfgName *string) (service.GrafanaService, *viper.Vip
}

func SetupCloudFunction(t *testing.T, cfg *viper.Viper, apiClient service.GrafanaService, params []string) (context.Context, service.GrafanaService) {
_ = os.Setenv("AWS_ACCESS_KEY", "test")
_ = os.Setenv("AWS_SECRET_KEY", "secretsss")
_ = os.Setenv(service.InitBucket, "true")
bucketName := params[1]
var m = map[string]string{
service.InitBucket: "true",
service.CloudType: params[0],
service.Prefix: "dummy",
service.AccessId: "test",
service.SecretKey: "secretsss",
service.BucketName: bucketName,
service.Kind: "cloud",
service.Custom: "true",
service.Endpoint: "http://localhost:9000",
service.SSLEnabled: "false",
}

cfgObj := config.Config().GetAppConfig()