diff --git a/go.mod b/go.mod
index 90c2121..5129d82 100644
--- a/go.mod
+++ b/go.mod
@@ -8,5 +8,5 @@ require (
 	github.com/graphql-go/handler v0.2.3
 	github.com/minio/minio-go/v7 v7.0.12
 	github.com/opentracing/opentracing-go v1.2.0 // indirect
-	github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
+	github.com/patrickmn/go-cache v2.1.0+incompatible
 )
diff --git a/internal/dataloader.go b/internal/dataloader.go
index 332fe3d..cf73af9 100644
--- a/internal/dataloader.go
+++ b/internal/dataloader.go
@@ -22,7 +22,8 @@ func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result
 
 	for _, v := range k {
 		results = append(results, &dataloader.Result{
-			Data: listObjects(s3Client, bucketName, v.String(), false),
+			Data:  listObjects(s3Client, bucketName, v.String(), false),
+			Error: nil,
 		})
 	}
 
@@ -54,12 +55,11 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	}
 
 	for _, v := range k {
-
 		path := v.String()
 		files := make([]File, 0)
 
 		if !strings.HasSuffix(path, "/") {
-			path = path + "/"
+			path += "/"
 		}
 
 		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
@@ -71,16 +71,14 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 		for _, obj := range objects.([]minio.ObjectInfo) {
 			if obj.Err != nil {
 				// TODO: how to handle?
-			} else {
-				if !strings.HasSuffix(obj.Key, "/") {
-					files = append(files, File{
-						ID:          obj.Key,
-						Name:        filepath.Base(obj.Key),
-						Size:        obj.Size,
-						ContentType: obj.ContentType,
-						ETag:        obj.ETag,
-					})
-				}
+			} else if !strings.HasSuffix(obj.Key, "/") {
+				files = append(files, File{
+					ID:          obj.Key,
+					Name:        filepath.Base(obj.Key),
+					Size:        obj.Size,
+					ContentType: obj.ContentType,
+					ETag:        obj.ETag,
+				})
 			}
 		}
 
@@ -104,7 +102,6 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	}
 
 	for _, v := range k {
-
 		obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
 
 		if err != nil {
@@ -123,7 +120,6 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 				Error: nil,
 			})
 		}
-
 	}
 
 	return results
@@ -139,12 +135,11 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 
 	for _, v := range k {
-
 		path := v.String()
 		dirs := make([]Directory, 0)
 
 		if !strings.HasSuffix(path, "/") {
-			path = path + "/"
+			path += "/"
 		}
 
 		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
 
@@ -156,13 +151,11 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 		for _, obj := range objects.([]minio.ObjectInfo) {
 			if obj.Err != nil {
 				// TODO: how to handle?
-			} else {
-				if strings.HasSuffix(obj.Key, "/") {
-					dirs = append(dirs, Directory{
-						ID:   obj.Key,
-						Name: filepath.Base(obj.Key),
-					})
-				}
+			} else if strings.HasSuffix(obj.Key, "/") {
+				dirs = append(dirs, Directory{
+					ID:   obj.Key,
+					Name: filepath.Base(obj.Key),
+				})
 			}
 		}
 
@@ -192,10 +185,25 @@ func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
 
 func createDataloader(config AppConfig) map[string]*dataloader.Loader {
 	loaderMap := make(map[string]*dataloader.Loader, 0)
-	loaderMap["getFiles"] = dataloader.NewBatchedLoader(getFilesBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["getFile"] = dataloader.NewBatchedLoader(getFileBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["listObjects"] = dataloader.NewBatchedLoader(listObjectsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["getDirs"] = dataloader.NewBatchedLoader(getDirsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
+	loaderMap["getFiles"] = dataloader.NewBatchedLoader(
+		getFilesBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["getFile"] = dataloader.NewBatchedLoader(
+		getFileBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["listObjects"] = dataloader.NewBatchedLoader(
+		listObjectsBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["getDirs"] = dataloader.NewBatchedLoader(
+		getDirsBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
 
 	return loaderMap
 }
diff --git a/internal/graphqlTypes.go b/internal/graphqlTypes.go
index e41fd2c..71f2de6 100644
--- a/internal/graphqlTypes.go
+++ b/internal/graphqlTypes.go
@@ -61,6 +61,7 @@ func graphqlTypes() {
 				if err != nil {
 					return nil, err
 				}
+
 				return file.Size, nil
 			},
 		},
@@ -71,6 +72,7 @@ func graphqlTypes() {
 				if err != nil {
 					return nil, err
 				}
+
 				return file.ContentType, nil
 			},
 		},
@@ -81,6 +83,7 @@ func graphqlTypes() {
 				if err != nil {
 					return nil, err
 				}
+
 				return file.ETag, nil
 			},
 		},
@@ -126,7 +129,7 @@ func graphqlTypes() {
 			Resolve: func(p graphql.ResolveParams) (interface{}, error) {
 				source, ok := p.Source.(Directory)
 				if !ok {
-					return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
+					return nil, fmt.Errorf("Failed to parse Source for directories resolve")
 				}
 
 				loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
@@ -141,7 +144,7 @@ func graphqlTypes() {
 			Resolve: func(p graphql.ResolveParams) (interface{}, error) {
 				source, ok := p.Source.(Directory)
 				if !ok {
-					return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
+					return nil, fmt.Errorf("Failed to parse Source for directories resolve")
 				}
 
 				dirs := strings.Split(source.ID, "/")
diff --git a/internal/httpServer.go b/internal/httpServer.go
index 0637376..79cb764 100644
--- a/internal/httpServer.go
+++ b/internal/httpServer.go
@@ -15,7 +15,7 @@ import (
 )
 
 // initHttp setup and start the http server. Blocking
-func initHttp(schema graphql.Schema, resolveContext context.Context) {
+func initHttp(resolveContext context.Context, schema graphql.Schema) error {
 	h := handler.New(&handler.Config{
 		Schema: &schema,
 		Pretty: true,
@@ -39,7 +39,7 @@ func initHttp(schema graphql.Schema, resolveContext context.Context) {
 		}
 	})
 
-	http.ListenAndServe(":8080", nil)
+	return http.ListenAndServe(":8080", nil)
 }
 
 func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
@@ -48,20 +48,20 @@ func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
 	objInfo, err := s3Client.StatObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
 
 	if err != nil {
-		rw.WriteHeader(500)
+		rw.WriteHeader(http.StatusInternalServerError)
 		return
 	}
 
 	reqEtag := r.Header.Get("If-None-Match")
 
 	if reqEtag == objInfo.ETag {
-		rw.WriteHeader(304)
+		rw.WriteHeader(http.StatusNotModified)
 		return
 	}
 
 	obj, err := s3Client.GetObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
 
 	if err != nil {
-		rw.WriteHeader(500)
+		rw.WriteHeader(http.StatusInternalServerError)
 		return
 	}
@@ -70,7 +70,12 @@ func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
 	rw.Header().Set("Content-Type", objInfo.ContentType)
 	rw.Header().Set("ETag", objInfo.ETag)
 
-	io.Copy(rw, obj)
+	_, err = io.Copy(rw, obj)
+
+	if err != nil {
+		rw.WriteHeader(http.StatusInternalServerError)
+		return
+	}
 }
 
 func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
@@ -82,13 +87,19 @@ func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request)
 
 	contentType := r.Header.Get("Content-Type")
 	mimeType, _, _ := mime.ParseMediaType(contentType)
 
-	s3Client.PutObject(context.Background(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
+	info, err := s3Client.PutObject(context.Background(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
 		ContentType: mimeType,
 	})
 
-	// Invalidate cache
-	loader["getFile"].Clear(ctx, dataloader.StringKey(id))
-	loader["listObjects"].Clear(ctx, dataloader.StringKey(id))
-	loader["getFiles"].Clear(ctx, dataloader.StringKey(filepath.Dir(id)))
+	if err != nil {
+		rw.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	// Invalidate cache
+	loader["getFile"].Clear(ctx, dataloader.StringKey(info.Key))
+	loader["listObjects"].Clear(ctx, dataloader.StringKey(info.Key))
+	loader["getFiles"].Clear(ctx, dataloader.StringKey(filepath.Dir(info.Key)))
+
+	rw.WriteHeader(http.StatusCreated)
 }
diff --git a/internal/s3Broswer.go b/internal/s3Broswer.go
index c5fba84..b4ac794 100644
--- a/internal/s3Broswer.go
+++ b/internal/s3Broswer.go
@@ -81,5 +81,9 @@ func Start(config AppConfig) {
 	resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
 	resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
 
-	initHttp(schema, resolveContext)
+	err = initHttp(resolveContext, schema)
+
+	if err != nil {
+		log.Printf("Failed to start webserver: %s", err.Error())
+	}
 }