// s3browser-backend/internal/dataloader.go
package s3browser

import (
	"context"
	"fmt"
	"path/filepath"
	"strings"

	"github.com/graph-gophers/dataloader"
	"github.com/minio/minio-go/v7"
	log "github.com/sirupsen/logrus"
)
// listObjectsBatch batch func for calling s3.ListObjects()
func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
2021-08-12 15:48:28 +00:00
log.Debug("listObjectsBatch: ", k.Keys())
2021-07-26 12:52:36 +00:00
var results []*dataloader.Result
s3Client, ok := c.Value("s3Client").(*minio.Client)
if !ok {
return handleLoaderError(k, fmt.Errorf("Failed to get s3Client from context"))
}
for _, v := range k {
results = append(results, &dataloader.Result{
2021-08-06 14:31:07 +00:00
Data: listObjects(s3Client, bucketName, v.String(), false),
Error: nil,
2021-07-26 12:52:36 +00:00
})
}
return results
}
2021-08-27 20:03:19 +00:00
// listObjectsRecursiveBatch just like listObjectsBatch but with recursive set to true
func listObjectsRecursiveBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
log.Debug("listObjectsRecursiveBatch: ", k.Keys())
var results []*dataloader.Result
s3Client, ok := c.Value("s3Client").(*minio.Client)
if !ok {
return handleLoaderError(k, fmt.Errorf("Failed to get s3Client from context"))
}
for _, v := range k {
results = append(results, &dataloader.Result{
Data: listObjects(s3Client, bucketName, v.String(), true),
Error: nil,
})
}
return results
}
2021-07-26 12:52:36 +00:00
// listObjects helper func for listObjectsBatch
2021-08-06 11:48:49 +00:00
func listObjects(s3Client *minio.Client, bukitName, path string, recursive bool) []minio.ObjectInfo {
2021-08-12 15:48:28 +00:00
log.Debug("S3 call 'ListObjects': ", path)
2021-07-26 12:52:36 +00:00
objectCh := s3Client.ListObjects(context.Background(), bukitName, minio.ListObjectsOptions{
Prefix: path,
2021-08-27 20:03:19 +00:00
Recursive: recursive,
2021-07-26 12:52:36 +00:00
})
result := make([]minio.ObjectInfo, 0)
for obj := range objectCh {
result = append(result, obj)
}
return result
}
// getFilesBatch batch func for getting all files in path. Uses "listObjects" dataloader
func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
2021-08-12 15:48:28 +00:00
log.Debug("getFilesBatch: ", k.Keys())
2021-07-26 12:52:36 +00:00
var results []*dataloader.Result
loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
if !ok {
return handleLoaderError(k, fmt.Errorf("Failed to get loader from context"))
}
for _, v := range k {
path := v.String()
files := make([]File, 0)
if !strings.HasSuffix(path, "/") {
2021-08-06 14:31:07 +00:00
path += "/"
2021-07-26 12:52:36 +00:00
}
thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
objects, _ := thunk()
// TODO: handle thunk error
for _, obj := range objects.([]minio.ObjectInfo) {
if obj.Err != nil {
// TODO: how to handle?
2021-08-06 14:31:07 +00:00
} else if !strings.HasSuffix(obj.Key, "/") {
files = append(files, File{
2021-08-06 17:25:07 +00:00
ID: obj.Key,
Name: filepath.Base(obj.Key),
Size: obj.Size,
ContentType: obj.ContentType,
ETag: obj.ETag,
LastModified: obj.LastModified,
2021-08-06 14:31:07 +00:00
})
2021-07-26 12:52:36 +00:00
}
}
results = append(results, &dataloader.Result{
Data: files,
Error: nil,
})
}
return results
}
// getFileBatch batch func for getting object info
func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
2021-08-12 15:48:28 +00:00
log.Debug("getFileBatch: ", k.Keys())
2021-07-26 12:52:36 +00:00
var results []*dataloader.Result
s3Client, ok := c.Value("s3Client").(*minio.Client)
if !ok {
return handleLoaderError(k, fmt.Errorf("Failed to get s3Client from context"))
}
for _, v := range k {
2021-08-12 15:48:28 +00:00
log.Debug("S3 call 'StatObject': ", v.String())
2021-08-06 11:48:49 +00:00
obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
2021-08-03 21:10:23 +00:00
2021-08-06 11:48:49 +00:00
if err != nil {
results = append(results, &dataloader.Result{
Data: nil,
Error: err,
})
} else {
2021-07-26 12:52:36 +00:00
results = append(results, &dataloader.Result{
Data: &File{
2021-08-06 17:25:07 +00:00
ID: obj.Key,
Size: obj.Size,
ContentType: obj.ContentType,
ETag: obj.ETag,
LastModified: obj.LastModified,
2021-07-26 12:52:36 +00:00
},
Error: nil,
})
}
}
return results
}
// getDirsBatch batch func for getting dirs in a path
func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
2021-08-12 15:48:28 +00:00
log.Debug("getDirsBatch: ", k.Keys())
2021-07-26 12:52:36 +00:00
var results []*dataloader.Result
loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
if !ok {
return handleLoaderError(k, fmt.Errorf("Failed to get loader from context"))
}
for _, v := range k {
path := v.String()
dirs := make([]Directory, 0)
if !strings.HasSuffix(path, "/") {
2021-08-06 14:31:07 +00:00
path += "/"
2021-07-26 12:52:36 +00:00
}
thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
objects, _ := thunk()
// TODO: handle thunk error
for _, obj := range objects.([]minio.ObjectInfo) {
if obj.Err != nil {
// TODO: how to handle?
2021-08-06 14:31:07 +00:00
} else if strings.HasSuffix(obj.Key, "/") {
dirs = append(dirs, Directory{
ID: obj.Key,
Name: filepath.Base(obj.Key),
})
2021-07-26 12:52:36 +00:00
}
}
results = append(results, &dataloader.Result{
Data: dirs,
Error: nil,
})
}
return results
}
// handleLoaderError helper func when the whole batch failed
func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
2021-08-12 15:48:28 +00:00
log.Error(err.Error())
2021-07-26 12:52:36 +00:00
var results []*dataloader.Result
for range k {
results = append(results, &dataloader.Result{
Data: nil,
Error: err,
})
}
return results
}
2021-08-03 21:10:23 +00:00
// createDataloader create all dataloaders and return a map of them plus a cache for objects
2021-08-06 11:48:49 +00:00
func createDataloader(config AppConfig) map[string]*dataloader.Loader {
2021-07-26 12:52:36 +00:00
loaderMap := make(map[string]*dataloader.Loader, 0)
2021-08-06 14:31:07 +00:00
loaderMap["getFiles"] = dataloader.NewBatchedLoader(
getFilesBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
loaderMap["getFile"] = dataloader.NewBatchedLoader(
getFileBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
loaderMap["listObjects"] = dataloader.NewBatchedLoader(
listObjectsBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
2021-08-27 20:03:19 +00:00
loaderMap["listObjectsRecursive"] = dataloader.NewBatchedLoader(
listObjectsRecursiveBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
2021-08-06 14:31:07 +00:00
loaderMap["getDirs"] = dataloader.NewBatchedLoader(
getDirsBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
2021-07-26 12:52:36 +00:00
2021-08-06 11:48:49 +00:00
return loaderMap
2021-07-26 12:52:36 +00:00
}