s3browser-backend/internal/loader/loader.go

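// Package loader provides dataloaders that batch and cache S3 listing,
// stat, and bucket requests.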
package loader

import (
	"context"
	"fmt"

	"git.kapelle.org/niklas/s3browser/internal/cache"
	"git.kapelle.org/niklas/s3browser/internal/helper"
	"git.kapelle.org/niklas/s3browser/internal/s3"
	types "git.kapelle.org/niklas/s3browser/internal/types"
	"github.com/graph-gophers/dataloader"
)
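// Loader bundles the dataloader instances used for S3 requests together
// with the caches backing them.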
type Loader struct {
	listObjectsLoader          *dataloader.Loader
	listObjectsRecursiveLoader *dataloader.Loader
	statObjectLoader           *dataloader.Loader
	listBucketsLoader          *dataloader.Loader

	listObjectsLoaderCache          cache.S3Cache
	listObjectsRecursiveLoaderCache cache.S3Cache
	statObjectLoaderCache           cache.S3Cache
	listBucketsLoaderCache          cache.S3Cache
}
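// CacheConfig configures which cache backs each of the Loader's dataloaders.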
type CacheConfig struct {
	ListObjectsLoaderCache          cache.S3Cache
	ListObjectsRecursiveLoaderCache cache.S3Cache
	StatObjectLoaderCache           cache.S3Cache
	ListBucketsLoaderCache          cache.S3Cache
}
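// NewLoader creates a Loader whose dataloaders are backed by the caches
// supplied in cacheConfig.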
func NewLoader(cacheConfig CacheConfig) *Loader {
	listObjectsLoaderCache := cacheConfig.ListObjectsLoaderCache
	listObjectsRecursiveLoaderCache := cacheConfig.ListObjectsRecursiveLoaderCache
	statObjectLoaderCache := cacheConfig.StatObjectLoaderCache
	listBucketsLoaderCache := cacheConfig.ListBucketsLoaderCache

	return &Loader{
		listObjectsLoader: dataloader.NewBatchedLoader(
			listObjectsBatch,
			dataloader.WithCache(listObjectsLoaderCache),
		),
		listObjectsLoaderCache: listObjectsLoaderCache,

		listObjectsRecursiveLoader: dataloader.NewBatchedLoader(
			listObjectsRecursiveBatch,
			dataloader.WithCache(listObjectsRecursiveLoaderCache),
		),
		listObjectsRecursiveLoaderCache: listObjectsRecursiveLoaderCache,

		statObjectLoader: dataloader.NewBatchedLoader(
			statObjectBatch,
			dataloader.WithCache(statObjectLoaderCache),
		),
		statObjectLoaderCache: statObjectLoaderCache,

		listBucketsLoader: dataloader.NewBatchedLoader(
			listBucketsBatch,
			dataloader.WithCache(listBucketsLoaderCache),
		),
		listBucketsLoaderCache: listBucketsLoaderCache,
	}
}
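// GetFiles lists the objects directly under path and returns the
// non-directory entries as files.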
func (l *Loader) GetFiles(ctx context.Context, path types.ID) ([]types.File, error) {
	thunk := l.listObjectsLoader.Load(ctx, path)
	objects, err := thunk()
	if err != nil {
		return nil, err
	}

	var files []types.File
	for _, obj := range objects.([]s3.Object) {
		if !obj.ID.IsDirectory() {
			files = append(files, *helper.ObjInfoToFile(obj, path.Bucket))
		}
	}

	return files, nil
}
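// GetFile stats the object identified by id and returns it as a types.File.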
func (l *Loader) GetFile(ctx context.Context, id types.ID) (*types.File, error) {
	thunk := l.statObjectLoader.Load(ctx, id)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objInfo, ok := result.(*s3.Object)
	if !ok {
		return nil, fmt.Errorf("failed to stat object")
	}

	return helper.ObjInfoToFile(*objInfo, id.Bucket), nil
}
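// GetDirs lists the objects directly under path and returns the directory
// entries among them.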
func (l *Loader) GetDirs(ctx context.Context, path types.ID) ([]types.Directory, error) {
	thunk := l.listObjectsLoader.Load(ctx, path)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	var dirs []types.Directory
	for _, obj := range result.([]s3.Object) {
		if obj.ID.IsDirectory() {
			dirs = append(dirs, types.Directory{
				ID: obj.ID,
			})
		}
	}

	return dirs, nil
}
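// GetBuckets returns the names of all available buckets.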
func (l *Loader) GetBuckets(ctx context.Context) ([]string, error) {
	thunk := l.listBucketsLoader.Load(ctx, dataloader.StringKey(""))
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	return result.([]string), nil
}
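// GetFilesRecursive lists every object below path, including objects in
// subdirectories, and returns them as files.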
func (l *Loader) GetFilesRecursive(ctx context.Context, path types.ID) ([]types.File, error) {
	thunk := l.listObjectsRecursiveLoader.Load(ctx, path)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objects := result.([]s3.Object)

	var files []types.File
	for _, obj := range objects {
		files = append(files, *helper.ObjInfoToFile(obj, path.Bucket))
	}

	return files, nil
}
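// InvalidedCacheForId clears the cache entries that may have become stale
// after the object identified by id changed.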
func (l *Loader) InvalidedCacheForId(ctx context.Context, id types.ID) {
	parent := id.Parent()

	l.statObjectLoader.Clear(ctx, id)

	// Clearing the listing loaders is useless for now, until a proper cache is
	// used for "listObjectsLoader" and "listObjectsRecursiveLoader".
	// TODO: implement cache invalidation for "listObjectsLoader" and "listObjectsRecursiveLoader"
	l.listObjectsLoader.Clear(ctx, id).Clear(ctx, parent)

	// Remove each ancestor of id from the recursive listing cache.
	for rParent := parent; rParent != nil; rParent = rParent.Parent() {
		l.listObjectsRecursiveLoader.Clear(ctx, rParent)
	}
}