fixed cache invalidation issues
parent 8296b21883
commit 63b93f5895
@@ -3,7 +3,6 @@ package s3browser
 import (
 	"fmt"
 	"path/filepath"
-	"strings"
 	"time"
 
 	"github.com/graph-gophers/dataloader"
@@ -160,7 +159,7 @@ func graphqlTypes() {
 
 			loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
 
-			thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(source.ID))
+			thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(nomalizeID(source.ID)))
 			return thunk()
 		},
 	})
@@ -174,7 +173,7 @@ func graphqlTypes() {
 			}
 
 			loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
-			thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(source.ID))
+			thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(nomalizeID(source.ID)))
 
 			return thunk()
 		},
@@ -188,10 +187,8 @@ func graphqlTypes() {
 				return nil, fmt.Errorf("Failed to parse Source for directories resolve")
 			}
 
-			dirs := strings.Split(source.ID, "/")
-
 			return Directory{
-				ID: strings.Join(dirs[:len(dirs)-2], "/") + "/",
+				ID: getParentDir(source.ID),
 			}, nil
 		},
 	})
@@ -206,7 +203,7 @@ func loadFile(p graphql.ResolveParams) (*File, error) {
 
 	loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
 
-	thunk := loader["getFile"].Load(p.Context, dataloader.StringKey(source.ID))
+	thunk := loader["getFile"].Load(p.Context, dataloader.StringKey(nomalizeID(source.ID)))
 	result, err := thunk()
 
 	if err != nil {

@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"path/filepath"
+	"strings"
 
 	"github.com/graph-gophers/dataloader"
 	"github.com/minio/minio-go/v7"
@@ -18,13 +19,12 @@ func invalidateCache(ctx context.Context, id string) error {
 
 	log.Debug("Invalidate cache for id: ", id)
 
+	path := getPathFromId(id)
 
 	loader["getFile"].Clear(ctx, dataloader.StringKey(id))
-	loader["getFiles"].Clear(ctx, dataloader.StringKey(getPathFromId(id)))
-	// FIXME: this is the only way i can invalidate the cache without it breaking stuff
-	// loader["listObjects"].Clear(ctx, dataloader.StringKey(id))
-	loader["listObjects"].ClearAll()
-
+	loader["getFiles"].Clear(ctx, dataloader.StringKey(path))
+	loader["listObjects"].Clear(ctx, dataloader.StringKey(path))
+	loader["listObjectsRecursive"].Clear(ctx, dataloader.StringKey(path))
 
 	return nil
 }
@@ -35,7 +35,7 @@ func getPathFromId(id string) string {
 		return "/"
 	}
 
-	return dir + "/"
+	return nomalizeID(dir + "/")
 }
 
 func getFilenameFromID(id string) string {
@@ -50,11 +50,17 @@ func invalidateCacheForDir(ctx context.Context, path string) error {
 
 	log.Debug("Invalidate cache for dir: ", path)
 
-	// FIXME: only clear required ids
-	loader["getFile"].ClearAll()
-	loader["listObjects"].ClearAll()
-	loader["getFiles"].ClearAll()
-	loader["getDirs"].ClearAll()
+	parent := getParentDir(path)
+
+	log.Debug("Cache clear dir: ", path, " parent: ", parent)
+
+	loader["getFile"].Clear(ctx, dataloader.StringKey(path))
+	loader["listObjects"].Clear(ctx, dataloader.StringKey(path))
+	loader["listObjectsRecursive"].Clear(ctx, dataloader.StringKey(path))
+	loader["getFiles"].Clear(ctx, dataloader.StringKey(path))
+	loader["getDirs"].Clear(ctx, dataloader.StringKey(parent))
+	loader["listObjects"].Clear(ctx, dataloader.StringKey(parent))
+	loader["listObjectsRecursive"].Clear(ctx, dataloader.StringKey(parent))
 
 	return nil
 }
@@ -77,3 +83,28 @@ func deleteMultiple(ctx context.Context, s3Client minio.Client, ids []minio.Obje
 
 	return nil
 }
+
+// nomalizeID makes sure there is a leading "/" in the id
+func nomalizeID(id string) string {
+	if !strings.HasPrefix(id, "/") {
+		if id == "." {
+			return "/"
+		}
+		id = "/" + id
+	}
+
+	return id
+}
+
+func getParentDir(id string) string {
+	dirs := strings.Split(id, "/")
+
+	cut := 1
+	if strings.HasSuffix(id, "/") {
+		cut = 2
+	}
+
+	parent := strings.Join(dirs[:len(dirs)-cut], "/") + "/"
+
+	return nomalizeID(parent)
+}
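
For reference, a minimal sketch of the behaviour the two new helpers are expected to have, based on the hunk above; the test file name and the cases are illustrative and not part of this commit:

// Hypothetical cache_test.go in package s3browser.
package s3browser

import "testing"

func TestNomalizeID(t *testing.T) {
	cases := map[string]string{
		".":        "/",        // bare "." collapses to the root
		"foo/bar":  "/foo/bar", // missing leading slash is added
		"/foo/bar": "/foo/bar", // already-normalized ids pass through unchanged
	}
	for in, want := range cases {
		if got := nomalizeID(in); got != want {
			t.Errorf("nomalizeID(%q) = %q, want %q", in, got, want)
		}
	}
}

func TestGetParentDir(t *testing.T) {
	cases := map[string]string{
		"/foo/bar/": "/foo/", // directory id: trailing slash, cut two segments
		"/foo/bar":  "/foo/", // file id: no trailing slash, cut one segment
		"/foo/":     "/",     // parent of a top-level directory is the root
	}
	for in, want := range cases {
		if got := getParentDir(in); got != want {
			t.Errorf("getParentDir(%q) = %q, want %q", in, got, want)
		}
	}
}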

@@ -25,7 +25,7 @@ func deleteMutation(ctx context.Context, id string) error {
 	}
 
 	// Invalidate cache
-	return invalidateCache(ctx, id)
+	return invalidateCache(ctx, nomalizeID(id))
 }
 
 func copyMutation(ctx context.Context, src, dest string) (*File, error) {
@@ -56,7 +56,7 @@ func copyMutation(ctx context.Context, src, dest string) (*File, error) {
 
 	// Invalidate cache
 	// TODO: check error
-	invalidateCache(ctx, info.Key)
+	invalidateCache(ctx, nomalizeID(info.Key))
 
 	return &File{
 		ID: info.Key,
@@ -97,7 +97,7 @@ func moveMutation(ctx context.Context, src, dest string) (*File, error) {
 		return nil, err
 	}
 
-	invalidateCache(ctx, info.Key)
+	invalidateCache(ctx, nomalizeID(info.Key))
 
 	return &File{
 		ID: info.Key,
@@ -126,7 +126,7 @@ func createDirectory(ctx context.Context, path string) (*Directory, error) {
 
 	// Invalidate cache
 	// TODO: check error
-	invalidateCacheForDir(ctx, info.Key)
+	invalidateCacheForDir(ctx, nomalizeID(info.Key))
 
 	return &Directory{
 		ID: info.Key,
@@ -152,7 +152,7 @@ func deleteDirectory(ctx context.Context, path string) error {
 	}
 
 	// Get all files inside the directory
-	thunk := loader["listObjectsRecursive"].Load(ctx, dataloader.StringKey(path))
+	thunk := loader["listObjectsRecursive"].Load(ctx, dataloader.StringKey(nomalizeID(path)))
 
 	result, err := thunk()
 
@@ -185,7 +185,7 @@ func deleteDirectory(ctx context.Context, path string) error {
 	}
 
 	//Invalidate cache
-	invalidateCacheForDir(ctx, path)
+	invalidateCacheForDir(ctx, nomalizeID(path))
 
 	return nil
 }
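
The change hinges on dataloader caching by the exact string key: a value primed via Load under one key is only evicted by Clear with that same key, which is why both the resolvers and the invalidation paths now go through nomalizeID. A small standalone sketch of that behaviour against github.com/graph-gophers/dataloader; the batch function and keys are made up for illustration:

package main

import (
	"context"
	"fmt"

	"github.com/graph-gophers/dataloader"
)

func main() {
	// Illustrative batch function: returns a dummy value per requested key.
	batchFn := func(ctx context.Context, keys dataloader.Keys) []*dataloader.Result {
		results := make([]*dataloader.Result, len(keys))
		for i, k := range keys {
			results[i] = &dataloader.Result{Data: "data for " + k.String()}
		}
		return results
	}

	loader := dataloader.NewBatchedLoader(batchFn)
	ctx := context.Background()

	// Prime the cache under the normalized key "/foo/".
	v, _ := loader.Load(ctx, dataloader.StringKey("/foo/"))()
	fmt.Println(v)

	// Clearing the unnormalized key "foo/" leaves the "/foo/" entry untouched;
	// only clearing the exact same key evicts it. Normalizing ids on both the
	// Load side and the Clear side keeps the two in sync.
	loader.Clear(ctx, dataloader.StringKey("foo/"))
	loader.Clear(ctx, dataloader.StringKey("/foo/"))
}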