added logging

Niklas 2021-08-12 17:48:28 +02:00
parent 926701125d
commit 7253a9195d
5 changed files with 59 additions and 11 deletions

go.mod

@@ -9,4 +9,5 @@ require (
github.com/minio/minio-go/v7 v7.0.12
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/sirupsen/logrus v1.8.1
)
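
The only module change is the new github.com/sirupsen/logrus dependency used by the logging calls below. As a rough, self-contained sketch of how logrus is typically configured and used (the actual setup in this commit happens in Start(), shown in the last file; the JSON formatter here is an assumption for illustration, not something this commit adds):

package main

import (
    log "github.com/sirupsen/logrus"
)

func main() {
    // Show Debug lines such as the per-S3-call messages added in this commit;
    // logrus defaults to InfoLevel, which would suppress them.
    log.SetLevel(log.DebugLevel)
    // Assumption for illustration only: structured JSON output instead of the default text formatter.
    log.SetFormatter(&log.JSONFormatter{})

    log.Info("Starting")
    log.Debug("S3 call 'ListObjects': ", "some/path/")
    log.Error("Failed to start webserver: ", "listen tcp :8080: address already in use")
}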


@@ -8,10 +8,12 @@ import (
"github.com/graph-gophers/dataloader"
"github.com/minio/minio-go/v7"
log "github.com/sirupsen/logrus"
)
// listObjectsBatch batch func for calling s3.ListObjects()
func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
log.Debug("listObjectsBatch: ", k.Keys())
var results []*dataloader.Result
s3Client, ok := c.Value("s3Client").(*minio.Client)
@@ -32,6 +34,7 @@ func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result
// listObjects helper func for listObjectsBatch
func listObjects(s3Client *minio.Client, bukitName, path string, recursive bool) []minio.ObjectInfo {
log.Debug("S3 call 'ListObjects': ", path)
objectCh := s3Client.ListObjects(context.Background(), bukitName, minio.ListObjectsOptions{
Prefix: path,
Recursive: false,
@@ -47,6 +50,7 @@ func listObjects(s3Client *minio.Client, bukitName, path string, recursive bool)
// getFilesBatch batch func for getting all files in path. Uses "listObjects" dataloader
func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
log.Debug("getFilesBatch: ", k.Keys())
var results []*dataloader.Result
loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
@@ -94,6 +98,7 @@ func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
// getFileBatch batch func for getting object info
func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
log.Debug("getFileBatch: ", k.Keys())
var results []*dataloader.Result
s3Client, ok := c.Value("s3Client").(*minio.Client)
@@ -103,6 +108,7 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
}
for _, v := range k {
log.Debug("S3 call 'StatObject': ", v.String())
obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
if err != nil {
@@ -129,6 +135,7 @@ func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
// getDirsBatch batch func for getting dirs in a path
func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
log.Debug("getDirsBatch: ", k.Keys())
var results []*dataloader.Result
loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
@@ -172,6 +179,7 @@ func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
// handleLoaderError helper func when the whole batch failed
func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
log.Error(err.Error())
var results []*dataloader.Result
for range k {
results = append(results, &dataloader.Result{

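Each of the batch functions above (and their new per-batch Debug lines) only runs once the corresponding dataloader is wired into the request context. A hedged sketch of that wiring, assuming the same package: the helper name newLoaderContext and the "getFile" key are made up for illustration, while createDataloader (called later in Start) presumably does something similar.

package s3browser

import (
    "context"

    "github.com/graph-gophers/dataloader"
    "github.com/minio/minio-go/v7"
    log "github.com/sirupsen/logrus"
)

// newLoaderContext (hypothetical) registers the batch functions from this file
// and attaches both the loaders and the S3 client to a context, which is how
// the resolvers and HTTP handlers below retrieve them.
func newLoaderContext(s3Client *minio.Client) context.Context {
    loaders := map[string]*dataloader.Loader{
        "listObjects": dataloader.NewBatchedLoader(listObjectsBatch),
        "getFiles":    dataloader.NewBatchedLoader(getFilesBatch),
        "getDirs":     dataloader.NewBatchedLoader(getDirsBatch),
        "getFile":     dataloader.NewBatchedLoader(getFileBatch), // key name assumed
    }
    log.Debug("registered dataloaders: ", len(loaders))

    ctx := context.WithValue(context.Background(), "s3Client", s3Client)
    return context.WithValue(ctx, "loader", loaders)
}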

@@ -12,6 +12,8 @@ import (
"github.com/graphql-go/graphql"
"github.com/graphql-go/handler"
"github.com/minio/minio-go/v7"
log "github.com/sirupsen/logrus"
)
// initHttp setup and start the http server. Blocking
@@ -45,6 +47,8 @@ func initHttp(resolveContext context.Context, schema graphql.Schema) error {
func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
s3Client := ctx.Value("s3Client").(*minio.Client)
id := r.URL.Query().Get("id")
log.Debug("S3 call 'StatObject': ", id)
objInfo, err := s3Client.StatObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
if err != nil {
@@ -58,6 +62,7 @@ func httpGetFile(ctx context.Context, rw http.ResponseWriter, r *http.Request) {
return
}
log.Debug("S3 call 'GetObject': ", id)
obj, err := s3Client.GetObject(context.Background(), bucketName, id, minio.GetObjectOptions{})
if err != nil {
@@ -84,9 +89,12 @@ func httpPostFile(ctx context.Context, rw http.ResponseWriter, r *http.Request)
id := r.URL.Query().Get("id")
log.Debug("Upload file: ", id)
contentType := r.Header.Get("Content-Type")
mimeType, _, _ := mime.ParseMediaType(contentType)
log.Debug("S3 call 'PutObject': ", id)
info, err := s3Client.PutObject(context.Background(), bucketName, id, r.Body, r.ContentLength, minio.PutObjectOptions{
ContentType: mimeType,
})
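
httpGetFile and httpPostFile take an extra context argument carrying the S3 client, so they cannot be registered on a mux directly; initHttp (only partially visible here) has to adapt them. A sketch of such an adapter, with the route path "/file" assumed for illustration:

package s3browser

import (
    "context"
    "net/http"
)

// registerFileRoutes (hypothetical) adapts the (ctx, rw, r) handlers above to
// net/http's HandlerFunc signature by closing over the shared resolver context.
func registerFileRoutes(mux *http.ServeMux, ctx context.Context) {
    mux.HandleFunc("/file", func(rw http.ResponseWriter, r *http.Request) {
        switch r.Method {
        case http.MethodGet:
            httpGetFile(ctx, rw, r)
        case http.MethodPost:
            httpPostFile(ctx, rw, r)
        default:
            rw.WriteHeader(http.StatusMethodNotAllowed)
        }
    })
}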


@@ -2,11 +2,12 @@ package s3browser
import (
"context"
"log"
"fmt"
"time"
"github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials"
log "github.com/sirupsen/logrus"
)
// AppConfig general config
@@ -41,50 +42,64 @@ type Directory struct {
var bucketName = "dev"
// setupS3Client connect the s3Client
func setupS3Client(config AppConfig) *minio.Client {
func setupS3Client(config AppConfig) (*minio.Client, error) {
minioClient, err := minio.New(config.S3Endoint, &minio.Options{
Creds: credentials.NewStaticV4(config.S3AccessKey, config.S3SecretKey, ""),
Secure: config.S3SSL,
})
if err != nil {
log.Fatalln(err)
return nil, err
}
exists, err := minioClient.BucketExists(context.Background(), config.S3Buket)
if err != nil {
log.Fatalln(err)
return nil, err
}
if !exists {
log.Fatalf("Bucket '%s' does not exist", config.S3Buket)
} else {
log.Print("S3 client connected")
return nil, fmt.Errorf("Bucket '%s' does not exist", config.S3Buket)
}
return minioClient
return minioClient, nil
}
// Start starts the app
func Start(config AppConfig) {
s3Client := setupS3Client(config)
log.SetLevel(log.DebugLevel)
log.Info("Starting")
log.Debug("Setting up s3 client")
s3Client, err := setupS3Client(config)
if err != nil {
log.Error("Failed to setup s3 client: ", err.Error())
return
}
log.Info("s3 client connected")
log.Debug("Creating dataloader")
loaderMap := createDataloader(config)
log.Debug("Generating graphq schema")
graphqlTypes()
schema, err := graphqlSchema()
if err != nil {
log.Panic(err)
log.Error("Failed to generate graphq schemas: ", err.Error())
return
}
resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
log.Debug("Starting HTTP server")
err = initHttp(resolveContext, schema)
if err != nil {
log.Printf("Failed to start webserver: %s", err.Error())
log.Error("Failed to start webserver: ", err.Error())
return
}
}
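
The log.SetLevel(log.DebugLevel) call above is what makes the new log.Debug lines visible; logrus otherwise defaults to InfoLevel and suppresses them. A small sketch, assuming a hypothetical level name coming from configuration (no such option exists in AppConfig in this commit), of how a level string maps onto logrus levels via log.ParseLevel:

package s3browser

import (
    log "github.com/sirupsen/logrus"
)

// configureLogLevel (hypothetical helper) maps a level name such as "debug",
// "info" or "warn" onto a logrus level, falling back to Info when the name
// is not recognised.
func configureLogLevel(levelName string) {
    level, err := log.ParseLevel(levelName)
    if err != nil {
        log.Warn("unknown log level '", levelName, "', falling back to info")
        level = log.InfoLevel
    }
    log.SetLevel(level)
}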


@@ -5,6 +5,8 @@ import (
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
log "github.com/sirupsen/logrus"
)
// graphqlSchema generate the schema with its root query and mutation
@@ -24,6 +26,9 @@ func graphqlSchema() (graphql.Schema, error) {
if !ok {
return nil, nil
}
log.Debug("querry 'files': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(path))
return thunk()
@@ -42,6 +47,9 @@ func graphqlSchema() (graphql.Schema, error) {
if !ok {
return nil, nil
}
log.Debug("querry 'directorys': ", path)
loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(path))
return thunk()
@@ -60,6 +68,8 @@ func graphqlSchema() (graphql.Schema, error) {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("querry 'file': ", id)
return File{
ID: id,
}, nil
@@ -81,6 +91,8 @@ func graphqlSchema() (graphql.Schema, error) {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'delete': ", id)
return id, deleteMutation(p.Context, id)
},
},
@@ -104,6 +116,8 @@ func graphqlSchema() (graphql.Schema, error) {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'copy': ", src, "-->", dest)
return copyMutation(p.Context, src, dest)
},
},
@@ -127,6 +141,8 @@ func graphqlSchema() (graphql.Schema, error) {
return nil, fmt.Errorf("Failed to parse args")
}
log.Debug("mutation 'move': ", src, "-->", dest)
return moveMutation(p.Context, src, dest)
},
},
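
For completeness, a sketch of running one query against the generated schema, which would emit the "querry 'files'" Debug line from the resolver above. The field and argument names ("files", "path", "id") are inferred from the resolvers and the File type, not confirmed by this diff; schema and resolveContext are the values built in Start:

package s3browser

import (
    "context"

    "github.com/graphql-go/graphql"
    log "github.com/sirupsen/logrus"
)

// runExampleQuery (hypothetical) executes a single files query; each resolver
// it touches now logs what it is doing before hitting the dataloaders.
func runExampleQuery(schema graphql.Schema, resolveContext context.Context) {
    result := graphql.Do(graphql.Params{
        Schema:        schema,
        RequestString: `{ files(path: "some/dir/") { id } }`,
        Context:       resolveContext,
    })
    if len(result.Errors) > 0 {
        log.Error("graphql errors: ", result.Errors)
        return
    }
    log.Debug("query result: ", result.Data)
}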