the big refactor 2: return of the ID struct

This commit is contained in:
2021-09-27 01:59:32 +02:00
parent 91e217e472
commit 74037dfab5
8 changed files with 238 additions and 151 deletions

View File

@@ -16,6 +16,7 @@ import (
var graphqlDirType *graphql.Object
var graphqlFileType *graphql.Object
var graphqlLoginResultType *graphql.Object
var objIDType *graphql.Scalar
// GraphqlTypes creates all graphql types and stores them in the global variables
func GraphqlTypes() {
@@ -45,7 +46,39 @@ func GraphqlTypes() {
ParseLiteral: func(valueAST ast.Value) interface{} {
switch valueAST := valueAST.(type) {
case *ast.StringValue:
return valueAST.Value
if tval, err := time.Parse(time.RFC3339, valueAST.Value); err != nil {
return nil
} else {
return tval
}
}
return nil
},
})
objIDType = graphql.NewScalar(graphql.ScalarConfig{
Name: "objID",
Description: `String representing a bucket, key and version combination.
Looks like this: "bucketName:/name/of/key" or "bucketName@version:/name/of/key"`,
Serialize: func(value interface{}) interface{} {
switch value := value.(type) {
case types.ID:
return value.String()
}
return "INVALID"
},
ParseValue: func(value interface{}) interface{} {
switch tvalue := value.(type) {
case string:
return types.ParseID(tvalue)
}
return nil
},
ParseLiteral: func(valueAST ast.Value) interface{} {
switch valueAST := valueAST.(type) {
case *ast.StringValue:
return types.ParseID(valueAST.Value)
}
return nil
},
@@ -56,7 +89,7 @@ func GraphqlTypes() {
Description: "Represents a directory",
Fields: graphql.Fields{
"id": &graphql.Field{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
"name": &graphql.Field{
Type: graphql.String,
@@ -66,7 +99,7 @@ func GraphqlTypes() {
return nil, fmt.Errorf("Failed to parse source for resolve")
}
return filepath.Base(source.ID), nil
return filepath.Base(source.ID.Key), nil
},
},
},
@@ -77,7 +110,7 @@ func GraphqlTypes() {
Description: "Represents a file, not a directory",
Fields: graphql.Fields{
"id": &graphql.Field{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
			Description: "The unique ID of the file. Represents the path and the s3 key.",
},
"name": &graphql.Field{
@@ -88,7 +121,7 @@ func GraphqlTypes() {
return nil, fmt.Errorf("Failed to parse source for resolve")
}
return filepath.Base(source.ID), nil
return filepath.Base(source.ID.Key), nil
},
},
"size": &graphql.Field{
@@ -143,10 +176,14 @@ func GraphqlTypes() {
return nil, fmt.Errorf("Failed to parse Source for parent resolve")
}
basename := helper.GetPathFromId(source.ID)
parent := source.ID.Parent()
if parent == nil {
return nil, nil
}
return types.Directory{
ID: basename,
ID: *source.ID.Parent(),
}, nil
},
},
@@ -163,7 +200,7 @@ func GraphqlTypes() {
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(helper.NomalizeID(source.ID)))
thunk := loader["getFiles"].Load(p.Context, source.ID)
return thunk()
},
})
@@ -177,7 +214,7 @@ func GraphqlTypes() {
}
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(helper.NomalizeID(source.ID)))
thunk := loader["getDirs"].Load(p.Context, source.ID)
return thunk()
},
@@ -223,7 +260,7 @@ func loadFile(p graphql.ResolveParams) (*types.File, error) {
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getFile"].Load(p.Context, dataloader.StringKey(helper.NomalizeID(source.ID)))
thunk := loader["getFile"].Load(p.Context, source.ID)
result, err := thunk()
if err != nil {

View File

@@ -12,7 +12,7 @@ import (
types "git.kapelle.org/niklas/s3browser/internal/types"
)
func deleteMutation(ctx context.Context, id string) error {
func deleteMutation(ctx context.Context, id types.ID) error {
s3Client, ok := ctx.Value("s3Client").(*minio.Client)
if !ok {
@@ -21,17 +21,17 @@ func deleteMutation(ctx context.Context, id string) error {
	// TODO: it is possible to remove multiple objects with a single call.
// Is it better to batch this?
err := s3Client.RemoveObject(ctx, "dev", id, minio.RemoveObjectOptions{})
err := s3Client.RemoveObject(ctx, id.Bucket, id.Key, minio.RemoveObjectOptions{})
if err != nil {
return err
}
// Invalidate cache
return helper.InvalidateCache(ctx, helper.NomalizeID(id))
return helper.InvalidateCache(ctx, id)
}
func copyMutation(ctx context.Context, src, dest string) (*types.File, error) {
func copyMutation(ctx context.Context, src, dest types.ID) (*types.File, error) {
s3Client, ok := ctx.Value("s3Client").(*minio.Client)
if !ok {
@@ -39,35 +39,42 @@ func copyMutation(ctx context.Context, src, dest string) (*types.File, error) {
}
// Check if dest is a file or a dir
if strings.HasSuffix(dest, "/") {
if dest.IsDirectory() {
// create new dest id
// TODO: What if a file with this id already exists?
dest += helper.GetFilenameFromID(src)
dest.Key += helper.GetFilenameFromKey(src.Key)
}
info, err := s3Client.CopyObject(ctx, minio.CopyDestOptions{
Bucket: "dev",
Object: dest,
Bucket: dest.Bucket,
Object: dest.Key,
}, minio.CopySrcOptions{
Bucket: "dev",
Object: src,
Bucket: src.Bucket,
Object: src.Key,
})
if err != nil {
return nil, err
}
newid := types.ID{
Bucket: info.Bucket,
Key: info.Key,
}
newid.Normalize()
// Invalidate cache
// TODO: check error
helper.InvalidateCache(ctx, helper.NomalizeID(info.Key))
helper.InvalidateCache(ctx, newid)
return &types.File{
ID: info.Key,
ID: newid,
}, nil
}
func moveMutation(ctx context.Context, src, dest string) (*types.File, error) {
func moveMutation(ctx context.Context, src, dest types.ID) (*types.File, error) {
s3Client, ok := ctx.Value("s3Client").(*minio.Client)
if !ok {
@@ -75,19 +82,19 @@ func moveMutation(ctx context.Context, src, dest string) (*types.File, error) {
}
// Check if dest is a file or a dir
if strings.HasSuffix(dest, "/") {
if dest.IsDirectory() {
// create new dest id
// TODO: What if a file with this id already exists?
dest += helper.GetFilenameFromID(src)
dest.Key += helper.GetFilenameFromKey(src.Key)
}
// There is no (spoon) move. Only copy and delete
info, err := s3Client.CopyObject(ctx, minio.CopyDestOptions{
Bucket: "dev",
Object: dest,
Bucket: dest.Bucket,
Object: dest.Key,
}, minio.CopySrcOptions{
Bucket: "dev",
Object: src,
Bucket: src.Bucket,
Object: src.Key,
})
if err != nil {
@@ -100,26 +107,29 @@ func moveMutation(ctx context.Context, src, dest string) (*types.File, error) {
return nil, err
}
helper.InvalidateCache(ctx, helper.NomalizeID(info.Key))
newId := types.ID{
Bucket: info.Bucket,
Key: info.Key,
}
newId.Normalize()
helper.InvalidateCache(ctx, newId)
return &types.File{
ID: info.Key,
ID: newId,
}, nil
}
func createDirectory(ctx context.Context, path string) (*types.Directory, error) {
func createDirectory(ctx context.Context, id types.ID) (*types.Directory, error) {
s3Client, ok := ctx.Value("s3Client").(*minio.Client)
if !ok {
return nil, fmt.Errorf("Failed to get s3Client from context")
}
if !strings.HasSuffix(path, "/") {
path += "/"
}
info, err := s3Client.PutObject(ctx, "dev", path, strings.NewReader(""), 0, minio.PutObjectOptions{
info, err := s3Client.PutObject(ctx, id.Bucket, id.Key, strings.NewReader(""), 0, minio.PutObjectOptions{
ContentType: "application/x-directory",
})
@@ -127,17 +137,24 @@ func createDirectory(ctx context.Context, path string) (*types.Directory, error)
return nil, err
}
newId := types.ID{
Bucket: info.Bucket,
Key: info.Key,
}
newId.Normalize()
// Invalidate cache
// TODO: check error
helper.InvalidateCacheForDir(ctx, helper.NomalizeID(info.Key))
helper.InvalidateCacheForDir(ctx, newId)
return &types.Directory{
ID: info.Key,
ID: newId,
}, nil
}
func deleteDirectory(ctx context.Context, path string) error {
func deleteDirectory(ctx context.Context, id types.ID) error {
s3Client, ok := ctx.Value("s3Client").(*minio.Client)
if !ok {
@@ -150,12 +167,8 @@ func deleteDirectory(ctx context.Context, path string) error {
return fmt.Errorf("Failed to get dataloader from context")
}
if !strings.HasSuffix(path, "/") {
path += "/"
}
// Get all files inside the directory
thunk := loader["listObjectsRecursive"].Load(ctx, dataloader.StringKey(helper.NomalizeID(path)))
thunk := loader["listObjectsRecursive"].Load(ctx, id)
result, err := thunk()
@@ -181,14 +194,14 @@ func deleteDirectory(ctx context.Context, path string) error {
// This is at least the behavior when working with minio as s3 backend
// TODO: check if this is normal behavior when working with s3
if len(files) == 0 {
err := s3Client.RemoveObject(ctx, "dev", path, minio.RemoveObjectOptions{})
err := s3Client.RemoveObject(ctx, id.Bucket, id.Key, minio.RemoveObjectOptions{})
if err != nil {
return err
}
}
//Invalidate cache
helper.InvalidateCacheForDir(ctx, helper.NomalizeID(path))
helper.InvalidateCacheForDir(ctx, id)
return nil
}

View File

@@ -20,7 +20,7 @@ func GraphqlSchema() (graphql.Schema, error) {
Type: graphql.NewNonNull(graphql.NewList(graphql.NewNonNull(graphqlFileType))),
Args: graphql.FieldConfigArgument{
"path": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.String),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -28,16 +28,16 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
path, ok := p.Args["path"].(string)
path, ok := p.Args["path"].(*types.ID)
if !ok {
return nil, nil
return nil, fmt.Errorf("Failed to parse args")
}
				log.Debug("query 'files': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(path))
thunk := loader["getFiles"].Load(p.Context, path)
return thunk()
},
},
@@ -45,7 +45,7 @@ func GraphqlSchema() (graphql.Schema, error) {
Type: graphql.NewNonNull(graphql.NewList(graphql.NewNonNull(graphqlDirType))),
Args: graphql.FieldConfigArgument{
"path": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.String),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -53,7 +53,7 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
path, ok := p.Args["path"].(string)
path, ok := p.Args["path"].(*types.ID)
if !ok {
return nil, nil
@@ -62,7 +62,7 @@ func GraphqlSchema() (graphql.Schema, error) {
				log.Debug("query 'directorys': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(path))
thunk := loader["getDirs"].Load(p.Context, path)
return thunk()
},
},
@@ -70,7 +70,7 @@ func GraphqlSchema() (graphql.Schema, error) {
Type: graphqlFileType,
Args: graphql.FieldConfigArgument{
"id": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -78,7 +78,7 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
id, ok := p.Args["id"].(string)
id, ok := p.Args["id"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
@@ -86,7 +86,7 @@ func GraphqlSchema() (graphql.Schema, error) {
				log.Debug("query 'file': ", id)
return types.File{
ID: id,
ID: *id,
}, nil
},
},
@@ -107,7 +107,7 @@ func GraphqlSchema() (graphql.Schema, error) {
Type: graphql.String,
Args: graphql.FieldConfigArgument{
"id": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -115,24 +115,24 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
id, ok := p.Args["id"].(string)
id, ok := p.Args["id"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'delete': ", id)
return id, deleteMutation(p.Context, id)
return id, deleteMutation(p.Context, *id)
},
},
"copy": &graphql.Field{
Type: graphqlFileType,
Args: graphql.FieldConfigArgument{
"src": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
"dest": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -140,28 +140,28 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
src, ok := p.Args["src"].(string)
src, ok := p.Args["src"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
dest, ok := p.Args["dest"].(string)
dest, ok := p.Args["dest"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'copy': ", src, "-->", dest)
return copyMutation(p.Context, src, dest)
return copyMutation(p.Context, *src, *dest)
},
},
"move": &graphql.Field{
Type: graphqlFileType,
Args: graphql.FieldConfigArgument{
"src": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
"dest": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -169,25 +169,25 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
src, ok := p.Args["src"].(string)
src, ok := p.Args["src"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
dest, ok := p.Args["dest"].(string)
dest, ok := p.Args["dest"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'move': ", src, "-->", dest)
return moveMutation(p.Context, src, dest)
return moveMutation(p.Context, *src, *dest)
},
},
"createDir": &graphql.Field{
Type: graphql.NewNonNull(graphqlDirType),
Args: graphql.FieldConfigArgument{
"path": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -195,21 +195,21 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
path, ok := p.Args["path"].(string)
path, ok := p.Args["path"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'createDir': ", path)
return createDirectory(p.Context, path)
return createDirectory(p.Context, *path)
},
},
"deleteDir": &graphql.Field{
Type: graphql.NewNonNull(graphql.String),
Args: graphql.FieldConfigArgument{
"path": &graphql.ArgumentConfig{
Type: graphql.NewNonNull(graphql.ID),
Type: graphql.NewNonNull(objIDType),
},
},
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -217,7 +217,7 @@ func GraphqlSchema() (graphql.Schema, error) {
return nil, s3errors.ErrNotAuthenticated
}
path, ok := p.Args["path"].(string)
path, ok := p.Args["path"].(*types.ID)
if !ok {
return nil, fmt.Errorf("Failed to parse args")
@@ -225,7 +225,7 @@ func GraphqlSchema() (graphql.Schema, error) {
log.Debug("mutation 'deleteDir': ", path)
return path, deleteDirectory(p.Context, path)
return path, deleteDirectory(p.Context, *path)
},
},
"login": &graphql.Field{