fixed linter stuff
parent acc9d6b5c3
commit da5d597b3f
go.mod (1 addition, 1 deletion)
@@ -8,5 +8,5 @@ require (
 	github.com/graphql-go/handler v0.2.3
 	github.com/minio/minio-go/v7 v7.0.12
 	github.com/opentracing/opentracing-go v1.2.0 // indirect
-	github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
+	github.com/patrickmn/go-cache v2.1.0+incompatible
 )
@@ -23,6 +23,7 @@ func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	for _, v := range k {
 		results = append(results, &dataloader.Result{
 			Data: listObjects(s3Client, bucketName, v.String(), false),
+			Error: nil,
 		})
 	}
 
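For context on the Error field made explicit above: with the graph-gophers/dataloader package (assumed here from the dataloader.* identifiers in this diff), whatever Error a batch function records in a Result is what callers get back when they resolve the thunk returned by Load. A minimal sketch, with a hypothetical ctx and key:

// Sketch only; "ctx" and the key are placeholders. Load returns a thunk,
// and the thunk hands back the Data and Error that the batch function
// (listObjectsBatch above) stored for that key.
thunk := loader["listObjects"].Load(ctx, dataloader.StringKey("some/prefix/"))
objects, err := thunk()
if err != nil {
	// this is the Error field set in the batch function's Result
}
_ = objects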
@@ -54,12 +55,11 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	}
 
 	for _, v := range k {
-
 		path := v.String()
 		files := make([]File, 0)
 
 		if !strings.HasSuffix(path, "/") {
-			path = path + "/"
+			path += "/"
 		}
 
 		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
@@ -71,8 +71,7 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 		for _, obj := range objects.([]minio.ObjectInfo) {
 			if obj.Err != nil {
 				// TODO: how to handle?
-			} else {
-				if !strings.HasSuffix(obj.Key, "/") {
+			} else if !strings.HasSuffix(obj.Key, "/") {
 				files = append(files, File{
 					ID: obj.Key,
 					Name: filepath.Base(obj.Key),
@@ -82,7 +81,6 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 				})
 			}
 		}
-		}
 
 		results = append(results, &dataloader.Result{
 			Data: files,
@@ -104,7 +102,6 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	}
 
 	for _, v := range k {
-
 		obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
 
 		if err != nil {
@@ -123,7 +120,6 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 			Error: nil,
 		})
 		}
-
 	}
 
 	return results
@@ -139,12 +135,11 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 	}
 
 	for _, v := range k {
-
 		path := v.String()
 		dirs := make([]Directory, 0)
 
 		if !strings.HasSuffix(path, "/") {
-			path = path + "/"
+			path += "/"
 		}
 
 		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
@@ -156,15 +151,13 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
 		for _, obj := range objects.([]minio.ObjectInfo) {
 			if obj.Err != nil {
 				// TODO: how to handle?
-			} else {
-				if strings.HasSuffix(obj.Key, "/") {
+			} else if strings.HasSuffix(obj.Key, "/") {
 				dirs = append(dirs, Directory{
 					ID: obj.Key,
 					Name: filepath.Base(obj.Key),
 				})
 			}
 		}
-		}
 
 		results = append(results, &dataloader.Result{
 			Data: dirs,
@@ -192,10 +185,25 @@ func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
 func createDataloader(config AppConfig) map[string]*dataloader.Loader {
 	loaderMap := make(map[string]*dataloader.Loader, 0)
 
-	loaderMap["getFiles"] = dataloader.NewBatchedLoader(getFilesBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["getFile"] = dataloader.NewBatchedLoader(getFileBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["listObjects"] = dataloader.NewBatchedLoader(listObjectsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
-	loaderMap["getDirs"] = dataloader.NewBatchedLoader(getDirsBatch, dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)))
+	loaderMap["getFiles"] = dataloader.NewBatchedLoader(
+		getFilesBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["getFile"] = dataloader.NewBatchedLoader(
+		getFileBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["listObjects"] = dataloader.NewBatchedLoader(
+		listObjectsBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
+
+	loaderMap["getDirs"] = dataloader.NewBatchedLoader(
+		getDirsBatch,
+		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
+	)
 
 	return loaderMap
 }
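The four NewBatchedLoader calls above now differ only in the batch function they wrap. A possible follow-up, not part of this commit, would be a small helper; the sketch below assumes the dataloader package is github.com/graph-gophers/dataloader (whose BatchFunc type matches the batch functions in this diff) and newCachedLoader is a hypothetical name.

// Hypothetical helper, not in this commit: one place for the
// NewBatchedLoader + WithCache(newCache(...)) wiring repeated above.
func newCachedLoader(config AppConfig, batch dataloader.BatchFunc) *dataloader.Loader {
	return dataloader.NewBatchedLoader(
		batch,
		dataloader.WithCache(newCache(config.CacheTTL, config.CacheCleanup)),
	)
}

createDataloader would then shrink to lines like loaderMap["getFiles"] = newCachedLoader(config, getFilesBatch).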
@@ -61,6 +61,7 @@ func graphqlTypes() {
 			if err != nil {
 				return nil, err
 			}
+
 			return file.Size, nil
 		},
 	},
@@ -71,6 +72,7 @@ func graphqlTypes() {
 			if err != nil {
 				return nil, err
 			}
+
 			return file.ContentType, nil
 		},
 	},
@@ -81,6 +83,7 @@ func graphqlTypes() {
 			if err != nil {
 				return nil, err
 			}
+
 			return file.ETag, nil
 		},
 	},
@@ -126,7 +129,7 @@ func graphqlTypes() {
 		Resolve: func(p graphql.ResolveParams) (interface{}, error) {
 			source, ok := p.Source.(Directory)
 			if !ok {
-				return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
+				return nil, fmt.Errorf("Failed to parse Source for directories resolve")
 			}
 
 			loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
@@ -141,7 +144,7 @@ func graphqlTypes() {
 		Resolve: func(p graphql.ResolveParams) (interface{}, error) {
 			source, ok := p.Source.(Directory)
 			if !ok {
-				return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
+				return nil, fmt.Errorf("Failed to parse Source for directories resolve")
 			}
 
 			dirs := strings.Split(source.ID, "/")
@@ -15,7 +15,7 @@ import (
 )
 
 // initHttp setup and start the http server. Blocking
-func initHttp(schema graphql.Schema, resolveContext context.Context) {
+func initHttp(resolveContext context.Context, schema graphql.Schema) error {
 	h := handler.New(&handler.Config{
 		Schema: &schema,
 		Pretty: true,
@@ -39,7 +39,7 @@ func initHttp(schema graphql.Schema, resolveContext context.Context) {
 		}
 	})
 
-	http.ListenAndServe(":8080", nil)
+	return http.ListenAndServe(":8080", nil)
 }
 
 func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
@@ -48,20 +48,20 @@ func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
 	objInfo, err := s3Client.StatObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
 
 	if err != nil {
-		rw.WriteHeader(500)
+		rw.WriteHeader(http.StatusInternalServerError)
 		return
 	}
 
 	reqEtag := r.Header.Get("If-None-Match")
 	if reqEtag == objInfo.ETag {
-		rw.WriteHeader(304)
+		rw.WriteHeader(http.StatusNotModified)
 		return
 	}
 
 	obj, err := s3Client.GetObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
 
 	if err != nil {
-		rw.WriteHeader(500)
+		rw.WriteHeader(http.StatusInternalServerError)
 		return
 	}
 
@@ -70,7 +70,12 @@ func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
 	rw.Header().Set("Content-Type", objInfo.ContentType)
 	rw.Header().Set("ETag", objInfo.ETag)
 
-	io.Copy(rw, obj)
+	_, err = io.Copy(rw, obj)
+
+	if err != nil {
+		rw.WriteHeader(http.StatusInternalServerError)
+		return
+	}
 }
 
 func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
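One caveat about the error path added above: by the time io.Copy fails, part of the body (and the implicit 200 status) has usually already been written, and net/http ignores a second WriteHeader. A minimal alternative sketch, assuming the standard log package is available in this file and reusing the id variable already in scope:

// Sketch, not part of the commit: once io.Copy has written any bytes the
// status line is already sent, so logging is the only useful action left.
if _, err := io.Copy(rw, obj); err != nil {
	log.Printf("streaming object %q failed: %s", id, err)
	return
}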
@@ -82,13 +87,19 @@ func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
 	contentType := r.Header.Get("Content-Type")
 	mimeType, _, _ := mime.ParseMediaType(contentType)
 
-	s3Client.PutObject(context.Background(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
+	info, err := s3Client.PutObject(context.Background(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
 		ContentType: mimeType,
 	})
 
-	// Invalidate cache
-	loader["getFile"].Clear(ctx, dataloader.StringKey(id))
-	loader["listObjects"].Clear(ctx, dataloader.StringKey(id))
-	loader["getFiles"].Clear(ctx, dataloader.StringKey(filepath.Dir(id)))
+	if err != nil {
+		rw.WriteHeader(http.StatusInternalServerError)
+		return
+	}
 
+	// Invalidate cache
+	loader["getFile"].Clear(ctx, dataloader.StringKey(info.Key))
+	loader["listObjects"].Clear(ctx, dataloader.StringKey(info.Key))
+	loader["getFiles"].Clear(ctx, dataloader.StringKey(filepath.Dir(info.Key)))
+
+	rw.WriteHeader(http.StatusCreated)
 }
@@ -81,5 +81,9 @@ func Start(config AppConfig) {
 	resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
 	resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
 
-	initHttp(schema, resolveContext)
+	err = initHttp(resolveContext, schema)
+
+	if err != nil {
+		log.Printf("Failed to start webserver: %s", err.Error())
+	}
 }