Compare commits

..

6 Commits

Author SHA1 Message Date
31e4586d68 implemented last modified 2021-08-06 19:25:07 +02:00
da5d597b3f fixed linter stuff 2021-08-06 16:31:07 +02:00
acc9d6b5c3 file upload cache invalidation 2021-08-06 14:13:08 +02:00
e095a6ceb0 implemented file upload 2021-08-06 13:49:00 +02:00
b42f45afe9 LESS CACHES 2021-08-06 13:48:49 +02:00
6598088024 MORE CACHES 2021-08-03 23:10:23 +02:00
5 changed files with 214 additions and 98 deletions

2
go.mod
View File

@@ -8,5 +8,5 @@ require (
github.com/graphql-go/handler v0.2.3 github.com/graphql-go/handler v0.2.3
github.com/minio/minio-go/v7 v7.0.12 github.com/minio/minio-go/v7 v7.0.12
github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect github.com/patrickmn/go-cache v2.1.0+incompatible
) )

View File

@@ -23,6 +23,7 @@ func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result
for _, v := range k { for _, v := range k {
results = append(results, &dataloader.Result{ results = append(results, &dataloader.Result{
Data: listObjects(s3Client, bucketName, v.String(), false), Data: listObjects(s3Client, bucketName, v.String(), false),
Error: nil,
}) })
} }
@@ -37,7 +38,6 @@ func listObjects(s3Client *minio.Client, bukitName, path string, recursive bool)
}) })
result := make([]minio.ObjectInfo, 0) result := make([]minio.ObjectInfo, 0)
for obj := range objectCh { for obj := range objectCh {
result = append(result, obj) result = append(result, obj)
} }
@@ -55,12 +55,11 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
} }
for _, v := range k { for _, v := range k {
path := v.String() path := v.String()
files := make([]File, 0) files := make([]File, 0)
if !strings.HasSuffix(path, "/") { if !strings.HasSuffix(path, "/") {
path = path + "/" path += "/"
} }
thunk := loader["listObjects"].Load(c, dataloader.StringKey(path)) thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
@@ -72,18 +71,17 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
for _, obj := range objects.([]minio.ObjectInfo) { for _, obj := range objects.([]minio.ObjectInfo) {
if obj.Err != nil { if obj.Err != nil {
// TODO: how to handle? // TODO: how to handle?
} else { } else if !strings.HasSuffix(obj.Key, "/") {
if !strings.HasSuffix(obj.Key, "/") {
files = append(files, File{ files = append(files, File{
ID: obj.Key, ID: obj.Key,
Name: filepath.Base(obj.Key), Name: filepath.Base(obj.Key),
Size: obj.Size, Size: obj.Size,
ContentType: obj.ContentType, ContentType: obj.ContentType,
ETag: obj.ETag, ETag: obj.ETag,
LastModified: obj.LastModified,
}) })
} }
} }
}
results = append(results, &dataloader.Result{ results = append(results, &dataloader.Result{
Data: files, Data: files,
@@ -119,6 +117,7 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
Size: obj.Size, Size: obj.Size,
ContentType: obj.ContentType, ContentType: obj.ContentType,
ETag: obj.ETag, ETag: obj.ETag,
LastModified: obj.LastModified,
}, },
Error: nil, Error: nil,
}) })
@@ -138,12 +137,11 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
} }
for _, v := range k { for _, v := range k {
path := v.String() path := v.String()
dirs := make([]Directory, 0) dirs := make([]Directory, 0)
if !strings.HasSuffix(path, "/") { if !strings.HasSuffix(path, "/") {
path = path + "/" path += "/"
} }
thunk := loader["listObjects"].Load(c, dataloader.StringKey(path)) thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
@@ -155,15 +153,13 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
for _, obj := range objects.([]minio.ObjectInfo) { for _, obj := range objects.([]minio.ObjectInfo) {
if obj.Err != nil { if obj.Err != nil {
// TODO: how to handle? // TODO: how to handle?
} else { } else if strings.HasSuffix(obj.Key, "/") {
if strings.HasSuffix(obj.Key, "/") {
dirs = append(dirs, Directory{ dirs = append(dirs, Directory{
ID: obj.Key, ID: obj.Key,
Name: filepath.Base(obj.Key), Name: filepath.Base(obj.Key),
}) })
} }
} }
}
results = append(results, &dataloader.Result{ results = append(results, &dataloader.Result{
Data: dirs, Data: dirs,
@@ -187,14 +183,29 @@ func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
return results return results
} }
// createDataloader creates all dataloaders and returns a map of them // createDataloader creates all dataloaders and returns a map of them plus a cache for objects
func createDataloader(config AppConfig) map[string]*dataloader.Loader { func createDataloader(config AppConfig) map[string]*dataloader.Loader {
loaderMap := make(map[string]*dataloader.Loader, 0) loaderMap := make(map[string]*dataloader.Loader, 0)
loaderMap["getFiles"] = dataloader.NewBatchedLoader(getFilesBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup))) loaderMap["getFiles"] = dataloader.NewBatchedLoader(
loaderMap["getFile"] = dataloader.NewBatchedLoader(getFileBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup))) getFilesBatch,
loaderMap["listObjects"] = dataloader.NewBatchedLoader(listObjectsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup))) dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
loaderMap["getDirs"] = dataloader.NewBatchedLoader(getDirsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup))) )
loaderMap["getFile"] = dataloader.NewBatchedLoader(
getFileBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
loaderMap["listObjects"] = dataloader.NewBatchedLoader(
listObjectsBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
loaderMap["getDirs"] = dataloader.NewBatchedLoader(
getDirsBatch,
dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
)
return loaderMap return loaderMap
} }

View File

@@ -4,9 +4,11 @@ import (
"fmt" "fmt"
"path/filepath" "path/filepath"
"strings" "strings"
"time"
"github.com/graph-gophers/dataloader" "github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql" "github.com/graphql-go/graphql"
"github.com/graphql-go/graphql/language/ast"
) )
var graphqlDirType *graphql.Object var graphqlDirType *graphql.Object
@@ -14,6 +16,38 @@ var graphqlFileType *graphql.Object
// graphqlTypes creates all graphql types and stores them in the global variables // graphqlTypes creates all graphql types and stores them in the global variables
func graphqlTypes() { func graphqlTypes() {
var dateTimeType = graphql.NewScalar(graphql.ScalarConfig{
Name: "DateTime",
Description: "DateTime is a DateTime in ISO 8601 format",
Serialize: func(value interface{}) interface{} {
switch value := value.(type) {
case time.Time:
return value.Format(time.RFC3339)
}
return "INVALID"
},
ParseValue: func(value interface{}) interface{} {
switch tvalue := value.(type) {
case string:
if tval, err := time.Parse(time.RFC3339, tvalue); err != nil {
return nil
} else {
return tval
}
}
return nil
},
ParseLiteral: func(valueAST ast.Value) interface{} {
switch valueAST := valueAST.(type) {
case *ast.StringValue:
return valueAST.Value
}
return nil
},
})
graphqlDirType = graphql.NewObject(graphql.ObjectConfig{ graphqlDirType = graphql.NewObject(graphql.ObjectConfig{
Name: "Directory", Name: "Directory",
Description: "Represents a directory", Description: "Represents a directory",
@@ -61,6 +95,7 @@ func graphqlTypes() {
if err != nil { if err != nil {
return nil, err return nil, err
} }
return file.Size, nil return file.Size, nil
}, },
}, },
@@ -71,6 +106,7 @@ func graphqlTypes() {
if err != nil { if err != nil {
return nil, err return nil, err
} }
return file.ContentType, nil return file.ContentType, nil
}, },
}, },
@@ -81,9 +117,21 @@ func graphqlTypes() {
if err != nil { if err != nil {
return nil, err return nil, err
} }
return file.ETag, nil return file.ETag, nil
}, },
}, },
"lastModified": &graphql.Field{
Type: dateTimeType,
Resolve: func(p graphql.ResolveParams) (interface{}, error) {
file, err := loadFile(p)
if err != nil {
return nil, err
}
return file.LastModified, nil
},
},
"parent": &graphql.Field{ "parent": &graphql.Field{
Type: graphqlDirType, Type: graphqlDirType,
Resolve: func(p graphql.ResolveParams) (interface{}, error) { Resolve: func(p graphql.ResolveParams) (interface{}, error) {
@@ -126,7 +174,7 @@ func graphqlTypes() {
Resolve: func(p graphql.ResolveParams) (interface{}, error) { Resolve: func(p graphql.ResolveParams) (interface{}, error) {
source, ok := p.Source.(Directory) source, ok := p.Source.(Directory)
if !ok { if !ok {
return nil, fmt.Errorf("Failed to parse Source for directorys resolve") return nil, fmt.Errorf("Failed to parse Source for directories resolve")
} }
loader := p.Context.Value("loader").(map[string]*dataloader.Loader) loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
@@ -141,7 +189,7 @@ func graphqlTypes() {
Resolve: func(p graphql.ResolveParams) (interface{}, error) { Resolve: func(p graphql.ResolveParams) (interface{}, error) {
source, ok := p.Source.(Directory) source, ok := p.Source.(Directory)
if !ok { if !ok {
return nil, fmt.Errorf("Failed to parse Source for directorys resolve") return nil, fmt.Errorf("Failed to parse Source for directories resolve")
} }
dirs := strings.Split(source.ID, "/") dirs := strings.Split(source.ID, "/")

105
internal/httpServer.go Normal file
View File

@@ -0,0 +1,105 @@
package s3browser
import (
"context"
"fmt"
"io"
"mime"
"net/http"
"path/filepath"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
"github.com/graphql-go/handler"
"github.com/minio/minio-go/v7"
)
// initHttp sets up the HTTP routes and starts the web server on :8080.
// Blocking; returns the error from http.ListenAndServe.
func initHttp(resolveContext context.Context, schema graphql.Schema) error {
	h := handler.New(&handler.Config{
		Schema:     &schema,
		Pretty:     true,
		GraphiQL:   false,
		Playground: true,
	})
	http.HandleFunc("/graphql", func(rw http.ResponseWriter, r *http.Request) {
		// Resolvers pull the s3 client and dataloaders out of this context.
		h.ContextHandler(resolveContext, rw, r)
	})
	http.HandleFunc("/api/file", func(rw http.ResponseWriter, r *http.Request) {
		switch r.Method {
		case http.MethodGet:
			httpGetFile(resolveContext, rw, r)
		case http.MethodPost:
			httpPostFile(resolveContext, rw, r)
		default:
			// Previously any other method fell through with an implicit
			// empty 200; answer 405 so clients get a meaningful status.
			rw.Header().Set("Allow", "GET, POST")
			rw.WriteHeader(http.StatusMethodNotAllowed)
		}
	})
	return http.ListenAndServe(":8080", nil)
}
// httpGetFile streams the S3 object identified by the "id" query parameter
// to the client, honoring If-None-Match/ETag revalidation.
// ctx carries the shared *minio.Client under the "s3Client" key.
func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
	s3Client := ctx.Value("s3Client").(*minio.Client)
	id := r.URL.Query().Get("id")
	// Use the request context (not context.Background()) so the S3 calls
	// are cancelled when the client disconnects.
	objInfo, err := s3Client.StatObject(r.Context(), bucketName, id, minio.GetObjectOptions{})
	if err != nil {
		rw.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Conditional GET: unchanged ETag means the client's cached copy is valid.
	if r.Header.Get("If-None-Match") == objInfo.ETag {
		rw.WriteHeader(http.StatusNotModified)
		return
	}
	obj, err := s3Client.GetObject(r.Context(), bucketName, id, minio.GetObjectOptions{})
	if err != nil {
		rw.WriteHeader(http.StatusInternalServerError)
		return
	}
	// The minio object is an io.ReadCloser; the original leaked it.
	defer obj.Close()
	rw.Header().Set("Cache-Control", "must-revalidate")
	rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%q", filepath.Base(objInfo.Key)))
	rw.Header().Set("Content-Type", objInfo.ContentType)
	rw.Header().Set("ETag", objInfo.ETag)
	if _, err = io.Copy(rw, obj); err != nil {
		// Headers (and possibly part of the body) have already been sent,
		// so a 500 status can no longer reach the client; just abort.
		return
	}
}
// httpPostFile uploads the request body to S3 under the "id" query parameter
// and invalidates the dataloader caches that could serve stale listings.
// ctx carries the *minio.Client ("s3Client") and loader map ("loader").
func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
	s3Client := ctx.Value("s3Client").(*minio.Client)
	loader := ctx.Value("loader").(map[string]*dataloader.Loader)
	id := r.URL.Query().Get("id")
	// Best effort: an unparsable Content-Type yields an empty mime type,
	// which S3 stores as the default binary type.
	mimeType, _, _ := mime.ParseMediaType(r.Header.Get("Content-Type"))
	// Request context instead of context.Background(): abort the upload
	// if the client goes away.
	info, err := s3Client.PutObject(r.Context(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
		ContentType: mimeType,
	})
	if err != nil {
		rw.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Invalidate every cache entry that could now be stale.
	dir := filepath.Dir(info.Key)
	loader["getFile"].Clear(ctx, dataloader.StringKey(info.Key))
	// listObjectsBatch is keyed by the directory prefix with a trailing
	// slash (see getFilesBatch/getDirsBatch), so clearing only the object
	// key misses the listing entry; clear both to be safe.
	loader["listObjects"].Clear(ctx, dataloader.StringKey(info.Key))
	loader["listObjects"].Clear(ctx, dataloader.StringKey(dir+"/"))
	loader["getFiles"].Clear(ctx, dataloader.StringKey(dir))
	rw.WriteHeader(http.StatusCreated)
}

View File

@@ -2,16 +2,9 @@ package s3browser
import ( import (
"context" "context"
"fmt"
"io"
"log" "log"
"net/http"
"path/filepath"
"time" "time"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
"github.com/graphql-go/handler"
"github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials" "github.com/minio/minio-go/v7/pkg/credentials"
) )
@@ -34,6 +27,7 @@ type File struct {
Size int64 `json:"size"` Size int64 `json:"size"`
ContentType string `json:"contentType"` ContentType string `json:"contentType"`
ETag string `json:"etag"` ETag string `json:"etag"`
LastModified time.Time `json:"lastModified"`
} }
// Directory represents a directory with its metadata // Directory represents a directory with its metadata
@@ -46,55 +40,6 @@ type Directory struct {
var bucketName = "dev" var bucketName = "dev"
// initHttp setup and start the http server. Blocking
func initHttp(schema graphql.Schema, s3Client *minio.Client, loaderMap map[string]*dataloader.Loader) {
h := handler.New(&handler.Config{
Schema: &schema,
Pretty: true,
GraphiQL: false,
Playground: true,
})
resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
http.HandleFunc("/graphql", func(rw http.ResponseWriter, r *http.Request) {
h.ContextHandler(resolveContext, rw, r)
})
http.HandleFunc("/api/file", func(rw http.ResponseWriter, r *http.Request) {
id := r.URL.Query().Get("id")
objInfo, err := s3Client.StatObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
if err != nil {
rw.WriteHeader(500)
return
}
reqEtag := r.Header.Get("If-None-Match")
if reqEtag == objInfo.ETag {
rw.WriteHeader(304)
return
}
obj, err := s3Client.GetObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
if err != nil {
rw.WriteHeader(500)
return
}
rw.Header().Set("Cache-Control", "must-revalidate")
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base((objInfo.Key))))
rw.Header().Set("Content-Type", objInfo.ContentType)
rw.Header().Set("ETag", objInfo.ETag)
io.Copy(rw, obj)
})
http.ListenAndServe(":8080", nil)
}
// setupS3Client connect the s3Client // setupS3Client connect the s3Client
func setupS3Client(config AppConfig) *minio.Client { func setupS3Client(config AppConfig) *minio.Client {
minioClient, err := minio.New(config.S3Endoint, &minio.Options{ minioClient, err := minio.New(config.S3Endoint, &minio.Options{
@@ -125,7 +70,7 @@ func setupS3Client(config AppConfig) *minio.Client {
func Start(config AppConfig) { func Start(config AppConfig) {
s3Client := setupS3Client(config) s3Client := setupS3Client(config)
loader := createDataloader(config) loaderMap := createDataloader(config)
graphqlTypes() graphqlTypes()
schema, err := graphqlSchema() schema, err := graphqlSchema()
@@ -134,5 +79,12 @@ func Start(config AppConfig) {
log.Panic(err) log.Panic(err)
} }
initHttp(schema, s3Client, loader) resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
err = initHttp(resolveContext, schema)
if err != nil {
log.Printf("Failed to start webserver: %s", err.Error())
}
} }