// Package loader provides batched, cached access to S3 objects and buckets
// via github.com/graph-gophers/dataloader.
package loader

import (
	"context"
	"fmt"

	"git.kapelle.org/niklas/s3browser/internal/cache"
	"git.kapelle.org/niklas/s3browser/internal/helper"
	"git.kapelle.org/niklas/s3browser/internal/s3"
	types "git.kapelle.org/niklas/s3browser/internal/types"

	"github.com/graph-gophers/dataloader"
)

// Loader bundles the dataloaders used to access S3, together with their
// caches (kept separately so individual entries can be invalidated).
type Loader struct {
	listObjectsLoader          *dataloader.Loader
	listObjectsRecursiveLoader *dataloader.Loader
	statObjectLoader           *dataloader.Loader
	listBucketsLoader          *dataloader.Loader

	listObjectsLoaderCache          cache.S3Cache
	listObjectsRecursiveLoaderCache cache.S3Cache
	statObjectLoaderCache           cache.S3Cache
	listBucketsLoaderCache          cache.S3Cache
}

// NewLoader creates a Loader. Stat and bucket-list results are cached with a
// TTL from config; the two listing loaders currently use NoCache (see
// InvalidedCacheForId for why).
func NewLoader(config types.AppConfig) *Loader {
	listObjectsLoaderCache := &dataloader.NoCache{}
	listObjectsRecursiveLoaderCache := &dataloader.NoCache{}
	statObjectLoaderCache := cache.NewTTLCache(config.CacheTTL, config.CacheCleanup)
	listBucketsLoaderCache := cache.NewTTLCache(config.CacheTTL, config.CacheCleanup)

	return &Loader{
		listObjectsLoader: dataloader.NewBatchedLoader(
			listObjectsBatch,
			dataloader.WithCache(listObjectsLoaderCache),
		),
		listObjectsLoaderCache: listObjectsLoaderCache,
		listObjectsRecursiveLoader: dataloader.NewBatchedLoader(
			listObjectsRecursiveBatch,
			dataloader.WithCache(listObjectsRecursiveLoaderCache),
		),
		listObjectsRecursiveLoaderCache: listObjectsRecursiveLoaderCache,
		statObjectLoader: dataloader.NewBatchedLoader(
			statObjectBatch,
			dataloader.WithCache(statObjectLoaderCache),
		),
		statObjectLoaderCache: statObjectLoaderCache,
		listBucketsLoader: dataloader.NewBatchedLoader(
			listBucketsBatch,
			dataloader.WithCache(listBucketsLoaderCache),
		),
		listBucketsLoaderCache: listBucketsLoaderCache,
	}
}

// GetFiles lists the non-directory objects directly under path.
func (l *Loader) GetFiles(ctx context.Context, path types.ID) ([]types.File, error) {
	thunk := l.listObjectsLoader.Load(ctx, path)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objects, ok := result.([]s3.Object)
	if !ok {
		return nil, fmt.Errorf("unexpected result type from list objects loader")
	}

	var files []types.File
	for _, obj := range objects {
		// Directory placeholders are reported by GetDirs, not here.
		if !obj.ID.IsDirectory() {
			files = append(files, *helper.ObjInfoToFile(obj, path.Bucket))
		}
	}

	return files, nil
}

// GetFile stats a single object and returns it as a File.
func (l *Loader) GetFile(ctx context.Context, id types.ID) (*types.File, error) {
	thunk := l.statObjectLoader.Load(ctx, id)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objInfo, ok := result.(*s3.Object)
	if !ok {
		return nil, fmt.Errorf("failed to stat object")
	}

	return helper.ObjInfoToFile(*objInfo, id.Bucket), nil
}

// GetDirs lists the directory entries directly under path.
func (l *Loader) GetDirs(ctx context.Context, path types.ID) ([]types.Directory, error) {
	thunk := l.listObjectsLoader.Load(ctx, path)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objects, ok := result.([]s3.Object)
	if !ok {
		return nil, fmt.Errorf("unexpected result type from list objects loader")
	}

	var dirs []types.Directory
	for _, obj := range objects {
		if obj.ID.IsDirectory() {
			dirs = append(dirs, types.Directory{
				ID: obj.ID,
			})
		}
	}

	return dirs, nil
}

// GetBuckets returns the names of all visible buckets.
func (l *Loader) GetBuckets(ctx context.Context) ([]string, error) {
	// All callers share one cache entry, hence the constant key.
	thunk := l.listBucketsLoader.Load(ctx, dataloader.StringKey(""))
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	buckets, ok := result.([]string)
	if !ok {
		return nil, fmt.Errorf("unexpected result type from list buckets loader")
	}

	return buckets, nil
}

// GetFilesRecursive lists every object under path, descending into
// subdirectories.
func (l *Loader) GetFilesRecursive(ctx context.Context, path types.ID) ([]types.File, error) {
	thunk := l.listObjectsRecursiveLoader.Load(ctx, path)
	result, err := thunk()
	if err != nil {
		return nil, err
	}

	objects, ok := result.([]s3.Object)
	if !ok {
		return nil, fmt.Errorf("unexpected result type from recursive list objects loader")
	}

	var files []types.File
	for _, obj := range objects {
		files = append(files, *helper.ObjInfoToFile(obj, path.Bucket))
	}

	return files, nil
}

// InvalidedCacheForId drops all cache entries that could mention id: its stat
// entry, the listings of its parent, and every recursive listing of an
// ancestor.
// NOTE(review): exported name is misspelled ("Invalidated" intended) but kept
// for caller compatibility.
func (l *Loader) InvalidedCacheForId(ctx context.Context, id types.ID) {
	parent := id.Parent()

	l.statObjectLoader.Clear(ctx, id)

	// Code below is useless for now until we use a proper cache for
	// "listObjectsLoader" and "listObjectsRecursiveLoader".
	// TODO: implement cache invalidation for "listObjectsLoader" and
	// "listObjectsRecursiveLoader".
	l.listObjectsLoader.Clear(ctx, id).Clear(ctx, parent)

	// Remove up from recursive list.
	for rParent := parent; rParent != nil; rParent = rParent.Parent() {
		l.listObjectsRecursiveLoader.Clear(ctx, rParent)
	}
}