s3browser-backend/internal/dataloader.go
2021-08-03 23:10:23 +02:00

232 lines
5.7 KiB
Go

package s3browser
import (
"context"
"fmt"
"path/filepath"
"strings"
"github.com/graph-gophers/dataloader"
"github.com/minio/minio-go/v7"
gocache "github.com/patrickmn/go-cache"
)
// listObjectsBatch is the dataloader batch func wrapping s3.ListObjects():
// each key is a path prefix, and the result for that key is the slice of
// objects directly under it (also written into the shared object cache).
func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	s3Client, ok := c.Value("s3Client").(*minio.Client)
	if !ok {
		// Error strings lowercased per Go convention (staticcheck ST1005).
		return handleLoaderError(k, fmt.Errorf("failed to get s3Client from context"))
	}
	objCache, ok := c.Value("objCache").(*gocache.Cache)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("failed to get objCache from context"))
	}
	// Exactly one result per key, so pre-size the slice.
	results := make([]*dataloader.Result, 0, len(k))
	for _, v := range k {
		results = append(results, &dataloader.Result{
			Data: listObjects(s3Client, bucketName, v.String(), false, objCache),
		})
	}
	return results
}
// listObjects is the helper behind listObjectsBatch: it lists the objects
// under path in the given bucket and caches each returned ObjectInfo under
// its key (so getFileBatch can serve lookups without a StatObject round trip).
//
// Bug fix: the recursive parameter was previously ignored (Recursive was
// hard-coded to false); it is now forwarded to the listing options. The only
// current caller passes false, so existing behavior is unchanged.
func listObjects(s3Client *minio.Client, bucketName, path string, recursive bool, objCache *gocache.Cache) []minio.ObjectInfo {
	objectCh := s3Client.ListObjects(context.Background(), bucketName, minio.ListObjectsOptions{
		Prefix:    path,
		Recursive: recursive,
	})
	result := make([]minio.ObjectInfo, 0)
	for obj := range objectCh {
		// Add to object cache with the cache's default TTL.
		objCache.SetDefault(obj.Key, obj)
		result = append(result, obj)
	}
	return result
}
// getFilesBatch is the dataloader batch func returning the files
// (non-directory objects) directly under each key's path. It delegates the
// actual S3 listing to the shared "listObjects" loader so sibling loaders
// hit the same cache.
func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("failed to get loader from context"))
	}
	results := make([]*dataloader.Result, 0, len(k))
	for _, v := range k {
		// listObjects keys are directory prefixes and must end in "/".
		path := v.String()
		if !strings.HasSuffix(path, "/") {
			path += "/"
		}
		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
		objects, err := thunk()
		if err != nil {
			// Bug fix: the error was previously discarded, which made the
			// type assertion below panic on a failed listing. Report the
			// failure for this key and keep processing the others.
			results = append(results, &dataloader.Result{Data: nil, Error: err})
			continue
		}
		files := make([]File, 0)
		for _, obj := range objects.([]minio.ObjectInfo) {
			if obj.Err != nil {
				// Skip individual listing errors; the rest of the listing is
				// still usable. TODO: consider surfacing these to the caller.
				continue
			}
			// Keys ending in "/" are directory placeholders, not files.
			if !strings.HasSuffix(obj.Key, "/") {
				files = append(files, File{
					ID:          obj.Key,
					Name:        filepath.Base(obj.Key),
					Size:        obj.Size,
					ContentType: obj.ContentType,
					ETag:        obj.ETag,
				})
			}
		}
		results = append(results, &dataloader.Result{
			Data:  files,
			Error: nil,
		})
	}
	return results
}
// getFileBatch is the dataloader batch func resolving a single object's
// metadata per key. It serves from objCache (populated by listObjects) when
// possible and falls back to a StatObject call otherwise.
func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	s3Client, ok := c.Value("s3Client").(*minio.Client)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("failed to get s3Client from context"))
	}
	objCache, ok := c.Value("objCache").(*gocache.Cache)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("failed to get objCache from context"))
	}
	// toFile converts S3 object metadata into the File model. Name is now
	// populated from the key, matching what getFilesBatch returns (it was
	// previously left empty here — an inconsistency between the two loaders).
	toFile := func(obj minio.ObjectInfo) *File {
		return &File{
			ID:          obj.Key,
			Name:        filepath.Base(obj.Key),
			Size:        obj.Size,
			ContentType: obj.ContentType,
			ETag:        obj.ETag,
		}
	}
	results := make([]*dataloader.Result, 0, len(k))
	for _, v := range k {
		// Fast path: the object was already seen by a listing.
		if cachedObj, ok := objCache.Get(v.String()); ok {
			results = append(results, &dataloader.Result{
				Data:  toFile(cachedObj.(minio.ObjectInfo)),
				Error: nil,
			})
			continue
		}
		obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
		if err != nil {
			results = append(results, &dataloader.Result{Data: nil, Error: err})
			continue
		}
		results = append(results, &dataloader.Result{
			Data:  toFile(obj),
			Error: nil,
		})
	}
	return results
}
// getDirsBatch is the dataloader batch func returning the directories
// (common-prefix entries) directly under each key's path. Like getFilesBatch,
// it delegates the listing to the shared "listObjects" loader.
func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("failed to get loader from context"))
	}
	results := make([]*dataloader.Result, 0, len(k))
	for _, v := range k {
		// listObjects keys are directory prefixes and must end in "/".
		path := v.String()
		if !strings.HasSuffix(path, "/") {
			path += "/"
		}
		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
		objects, err := thunk()
		if err != nil {
			// Bug fix: the error was previously discarded, which made the
			// type assertion below panic on a failed listing. Report the
			// failure for this key and keep processing the others.
			results = append(results, &dataloader.Result{Data: nil, Error: err})
			continue
		}
		dirs := make([]Directory, 0)
		for _, obj := range objects.([]minio.ObjectInfo) {
			if obj.Err != nil {
				// Skip individual listing errors; the rest of the listing is
				// still usable. TODO: consider surfacing these to the caller.
				continue
			}
			// Keys ending in "/" are the directory placeholders.
			if strings.HasSuffix(obj.Key, "/") {
				dirs = append(dirs, Directory{
					ID:   obj.Key,
					Name: filepath.Base(obj.Key),
				})
			}
		}
		results = append(results, &dataloader.Result{
			Data:  dirs,
			Error: nil,
		})
	}
	return results
}
// handleLoaderError builds one error Result per requested key. It is used
// when an entire batch fails before any per-key work can happen, so every
// caller of the batch receives the same error.
func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
	results := make([]*dataloader.Result, len(k))
	for i := range results {
		results[i] = &dataloader.Result{Data: nil, Error: err}
	}
	return results
}
// createDataloader wires up every dataloader used by the resolvers and
// returns them keyed by name, together with a fresh cache for ObjectInfo
// values. Each loader gets its own request cache built from the configured
// TTL and cleanup interval.
func createDataloader(config AppConfig) (map[string]*dataloader.Loader, *gocache.Cache) {
	batchFuncs := map[string]dataloader.BatchFunc{
		"getFiles":    getFilesBatch,
		"getFile":     getFileBatch,
		"listObjects": listObjectsBatch,
		"getDirs":     getDirsBatch,
	}
	loaderMap := make(map[string]*dataloader.Loader, len(batchFuncs))
	for name, fn := range batchFuncs {
		loaderMap[name] = dataloader.NewBatchedLoader(fn, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
	}
	return loaderMap, gocache.New(config.CacheTTL, config.CacheCleanup)
}