even more refactor

This commit is contained in:
2021-10-14 19:00:11 +02:00
parent d2b0364445
commit 439e5473b6
10 changed files with 372 additions and 414 deletions

View File

@@ -5,11 +5,11 @@ import (
"path/filepath"
"time"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
"github.com/graphql-go/graphql/language/ast"
helper "git.kapelle.org/niklas/s3browser/internal/helper"
"git.kapelle.org/niklas/s3browser/internal/loader"
types "git.kapelle.org/niklas/s3browser/internal/types"
)
@@ -198,10 +198,9 @@ func GraphqlTypes() {
return nil, fmt.Errorf("Failed to parse Source for files resolve")
}
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
loader := p.Context.Value("loader").(*loader.Loader)
thunk := loader["getFiles"].Load(p.Context, source.ID)
return thunk()
return loader.GetFiles(p.Context, source.ID)
},
})
@@ -213,10 +212,8 @@ func GraphqlTypes() {
return nil, fmt.Errorf("Failed to parse Source for directories resolve")
}
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getDirs"].Load(p.Context, source.ID)
return thunk()
loader := p.Context.Value("loader").(*loader.Loader)
return loader.GetDirs(p.Context, source.ID)
},
})
@@ -258,17 +255,14 @@ func loadFile(p graphql.ResolveParams) (*types.File, error) {
return nil, fmt.Errorf("Failed to parse source for resolve")
}
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
loader := p.Context.Value("loader").(*loader.Loader)
thunk := loader["getFile"].Load(p.Context, source.ID)
result, err := thunk()
file, err := loader.GetFile(p.Context, source.ID)
if err != nil {
return nil, err
}
file, ok := result.(*types.File)
if !ok {
return nil, fmt.Errorf("Failed to load file")
}

View File

@@ -5,10 +5,10 @@ import (
"fmt"
"strings"
"github.com/graph-gophers/dataloader"
"github.com/minio/minio-go/v7"
helper "git.kapelle.org/niklas/s3browser/internal/helper"
"git.kapelle.org/niklas/s3browser/internal/loader"
types "git.kapelle.org/niklas/s3browser/internal/types"
)
@@ -27,8 +27,9 @@ func deleteMutation(ctx context.Context, id types.ID) error {
return err
}
// Invalidate cache
return helper.InvalidateCache(ctx, id)
ctx.Value("loader").(*loader.Loader).InvalidateCacheForFile(ctx, id)
return nil
}
func copyMutation(ctx context.Context, src, dest types.ID) (*types.File, error) {
@@ -57,19 +58,17 @@ func copyMutation(ctx context.Context, src, dest types.ID) (*types.File, error)
return nil, err
}
newid := types.ID{
newID := types.ID{
Bucket: info.Bucket,
Key: info.Key,
}
newid.Normalize()
newID.Normalize()
// Invalidate cache
// TODO: check error
helper.InvalidateCache(ctx, newid)
ctx.Value("loader").(*loader.Loader).InvalidateCacheForFile(ctx, newID)
return &types.File{
ID: newid,
ID: newID,
}, nil
}
@@ -114,7 +113,7 @@ func moveMutation(ctx context.Context, src, dest types.ID) (*types.File, error)
newId.Normalize()
helper.InvalidateCache(ctx, newId)
ctx.Value("loader").(*loader.Loader).InvalidateCacheForFile(ctx, newId)
return &types.File{
ID: newId,
@@ -137,19 +136,17 @@ func createDirectory(ctx context.Context, id types.ID) (*types.Directory, error)
return nil, err
}
newId := types.ID{
newID := types.ID{
Bucket: info.Bucket,
Key: info.Key,
}
newId.Normalize()
newID.Normalize()
// Invalidate cache
// TODO: check error
helper.InvalidateCacheForDir(ctx, newId)
ctx.Value("loader").(*loader.Loader).InvalidateCacheForDir(ctx, newID)
return &types.Directory{
ID: newId,
ID: newID,
}, nil
}
@@ -161,28 +158,26 @@ func deleteDirectory(ctx context.Context, id types.ID) error {
return fmt.Errorf("Failed to get s3Client from context")
}
loader, ok := ctx.Value("loader").(map[string]*dataloader.Loader)
loader, ok := ctx.Value("loader").(*loader.Loader)
if !ok {
return fmt.Errorf("Failed to get dataloader from context")
}
// Get all files inside the directory
thunk := loader["listObjectsRecursive"].Load(ctx, id)
result, err := thunk()
files, err := loader.GetFilesRecursive(ctx, id)
if err != nil {
return err
}
files, ok := result.([]minio.ObjectInfo)
if !ok {
return fmt.Errorf("Failed to get parse result from listObjects")
}
// Delete all child files
err = helper.DeleteMultiple(ctx, *s3Client, files)
var keysToDel []string
for _, file := range files {
keysToDel = append(keysToDel, file.ID.Key)
}
err = helper.DeleteMultiple(ctx, *s3Client, id.Bucket, keysToDel)
if err != nil {
return err
@@ -200,8 +195,7 @@ func deleteDirectory(ctx context.Context, id types.ID) error {
}
}
// Invalidate cache
helper.InvalidateCacheForDir(ctx, id)
loader.InvalidateCacheForDir(ctx, id)
return nil
}

View File

@@ -3,11 +3,11 @@ package gql
import (
"fmt"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
s3errors "git.kapelle.org/niklas/s3browser/internal/errors"
helper "git.kapelle.org/niklas/s3browser/internal/helper"
"git.kapelle.org/niklas/s3browser/internal/loader"
types "git.kapelle.org/niklas/s3browser/internal/types"
log "github.com/sirupsen/logrus"
)
@@ -36,9 +36,8 @@ func GraphqlSchema() (graphql.Schema, error) {
log.Debug("querry 'files': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getFiles"].Load(p.Context, path)
return thunk()
loader := p.Context.Value("loader").(*loader.Loader)
return loader.GetFiles(p.Context, *path)
},
},
"directories": &graphql.Field{
@@ -61,9 +60,8 @@ func GraphqlSchema() (graphql.Schema, error) {
log.Debug("querry 'directorys': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getDirs"].Load(p.Context, path)
return thunk()
loader := p.Context.Value("loader").(*loader.Loader)
return loader.GetDirs(p.Context, *path)
},
},
"file": &graphql.Field{
@@ -109,10 +107,8 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
// The only reason we use a dataloader with a empty key is that we want to cache the result
thunk := loader["listBuckets"].Load(p.Context, dataloader.StringKey(""))
return thunk()
loader := p.Context.Value("loader").(*loader.Loader)
return loader.GetBuckets(p.Context)
},
},
}