initial commit

This commit is contained in:
2021-07-26 14:52:36 +02:00
commit 48eea73833
9 changed files with 690 additions and 0 deletions

200
internal/dataloader.go Normal file
View File

@@ -0,0 +1,200 @@
package s3browser
import (
"context"
"fmt"
"path/filepath"
"strings"
"github.com/graph-gophers/dataloader"
"github.com/minio/minio-go/v7"
)
// listObjectsBatch is the dataloader batch function backing the
// "listObjects" loader. It pulls the shared *minio.Client out of the
// request context and performs one non-recursive listing per key
// (each key is an S3 prefix / "directory" path).
func listObjectsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	client, ok := c.Value("s3Client").(*minio.Client)
	if !ok {
		// Without a client nothing in this batch can succeed.
		return handleLoaderError(k, fmt.Errorf("Failed to get s3Client from context"))
	}
	var results []*dataloader.Result
	for _, key := range k {
		res := &dataloader.Result{
			Data: listObjects(client, bucketName, key.String(), false),
		}
		results = append(results, res)
	}
	return results
}
// listObjects is a helper for listObjectsBatch: it runs a single
// ListObjects call against the given bucket/prefix and drains the
// result channel into a slice.
//
// BUG FIX: the recursive argument was previously ignored (Recursive was
// hard-coded to false); it is now passed through. All existing callers
// pass false, so behavior for them is unchanged.
func listObjects(s3Client *minio.Client, bucket, path string, recursive bool) []minio.ObjectInfo {
	objectCh := s3Client.ListObjects(context.Background(), bucket, minio.ListObjectsOptions{
		Prefix:    path,
		Recursive: recursive,
	})
	result := make([]minio.ObjectInfo, 0)
	for obj := range objectCh {
		result = append(result, obj)
	}
	return result
}
// getFilesBatch is the dataloader batch function for the "getFiles"
// loader: for each key (a directory path) it lists the direct children
// via the shared "listObjects" loader and keeps only the file entries
// (object keys that do not end in "/").
func getFilesBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	var results []*dataloader.Result
	loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("Failed to get loader from context"))
	}
	for _, v := range k {
		path := v.String()
		// S3 prefixes must end in "/" to list a "directory".
		if !strings.HasSuffix(path, "/") {
			path += "/"
		}
		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
		objects, err := thunk()
		if err != nil {
			// BUG FIX: the thunk error was previously ignored, which made
			// the type assertion below panic on a nil interface; report
			// the failure for this key instead.
			results = append(results, &dataloader.Result{Data: nil, Error: err})
			continue
		}
		files := make([]File, 0)
		var objErr error
		for _, obj := range objects.([]minio.ObjectInfo) {
			if obj.Err != nil {
				// BUG FIX: per-object errors were silently dropped; keep
				// the first one and surface it on this key's result.
				if objErr == nil {
					objErr = obj.Err
				}
				continue
			}
			if strings.HasSuffix(obj.Key, "/") {
				continue // directory marker; handled by getDirsBatch
			}
			files = append(files, File{
				ID:          obj.Key,
				Name:        filepath.Base(obj.Key),
				Size:        obj.Size,
				ContentType: obj.ContentType,
				ETag:        obj.ETag,
			})
		}
		results = append(results, &dataloader.Result{
			Data:  files,
			Error: objErr,
		})
	}
	return results
}
// getFileBatch is the dataloader batch function for the "getFile"
// loader. Each key is an object key in bucketName; metadata is fetched
// with one StatObject call per key, and per-key failures are reported
// on the corresponding Result.
func getFileBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	var results []*dataloader.Result
	s3Client, ok := c.Value("s3Client").(*minio.Client)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("Failed to get s3Client from context"))
	}
	for _, v := range k {
		obj, err := s3Client.StatObject(context.Background(), bucketName, v.String(), minio.StatObjectOptions{})
		if err != nil {
			results = append(results, &dataloader.Result{
				Data:  nil,
				Error: err,
			})
			continue
		}
		results = append(results, &dataloader.Result{
			Data: &File{
				ID: obj.Key,
				// CONSISTENCY FIX: populate Name the same way
				// getFilesBatch does; it was previously left empty here.
				Name:        filepath.Base(obj.Key),
				Size:        obj.Size,
				ContentType: obj.ContentType,
				ETag:        obj.ETag,
			},
			Error: nil,
		})
	}
	return results
}
// getDirsBatch is the dataloader batch function for the "getDirs"
// loader: for each key (a directory path) it lists the direct children
// via the shared "listObjects" loader and keeps only the directory
// entries (object keys ending in "/").
func getDirsBatch(c context.Context, k dataloader.Keys) []*dataloader.Result {
	var results []*dataloader.Result
	loader, ok := c.Value("loader").(map[string]*dataloader.Loader)
	if !ok {
		return handleLoaderError(k, fmt.Errorf("Failed to get loader from context"))
	}
	for _, v := range k {
		path := v.String()
		// S3 prefixes must end in "/" to list a "directory".
		if !strings.HasSuffix(path, "/") {
			path += "/"
		}
		thunk := loader["listObjects"].Load(c, dataloader.StringKey(path))
		objects, err := thunk()
		if err != nil {
			// BUG FIX: the thunk error was previously ignored, which made
			// the type assertion below panic on a nil interface; report
			// the failure for this key instead.
			results = append(results, &dataloader.Result{Data: nil, Error: err})
			continue
		}
		dirs := make([]Directory, 0)
		var objErr error
		for _, obj := range objects.([]minio.ObjectInfo) {
			if obj.Err != nil {
				// BUG FIX: per-object errors were silently dropped; keep
				// the first one and surface it on this key's result.
				if objErr == nil {
					objErr = obj.Err
				}
				continue
			}
			if !strings.HasSuffix(obj.Key, "/") {
				continue // plain file; handled by getFilesBatch
			}
			dirs = append(dirs, Directory{
				ID:   obj.Key,
				Name: filepath.Base(obj.Key),
			})
		}
		results = append(results, &dataloader.Result{
			Data:  dirs,
			Error: objErr,
		})
	}
	return results
}
// handleLoaderError fails an entire batch: it produces one Result per
// requested key, each carrying the same error and no data, so the
// dataloader contract (len(results) == len(keys)) is preserved.
func handleLoaderError(k dataloader.Keys, err error) []*dataloader.Result {
	var results []*dataloader.Result
	for i := 0; i < len(k); i++ {
		failed := &dataloader.Result{
			Data:  nil,
			Error: err,
		}
		results = append(results, failed)
	}
	return results
}
// createDataloader wires up every dataloader used by the GraphQL
// resolvers and returns them keyed by name. The map travels in the
// request context under the "loader" key.
func createDataloader() map[string]*dataloader.Loader {
	return map[string]*dataloader.Loader{
		"getFiles":    dataloader.NewBatchedLoader(getFilesBatch),
		"getFile":     dataloader.NewBatchedLoader(getFileBatch),
		"listObjects": dataloader.NewBatchedLoader(listObjectsBatch),
		"getDirs":     dataloader.NewBatchedLoader(getDirsBatch),
	}
}

175
internal/graphqlTypes.go Normal file
View File

@@ -0,0 +1,175 @@
package s3browser
import (
"fmt"
"path/filepath"
"strings"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
)
var graphqlDirType *graphql.Object
var graphqlFileType *graphql.Object
// graphqlTypes creates the GraphQL object types and stores them in the
// package-level variables graphqlDirType and graphqlFileType.
//
// Directory and File reference each other ("files"/"directorys"/"parent"),
// so the cyclic fields are attached afterwards via AddFieldConfig — the
// call order in this function matters.
func graphqlTypes() {
	// Directory: identified by its full S3 prefix (the ID).
	graphqlDirType = graphql.NewObject(graphql.ObjectConfig{
		Name:        "Directory",
		Description: "Represents a directory",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Type: graphql.NewNonNull(graphql.ID),
			},
			"name": &graphql.Field{
				Type: graphql.String,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					source, ok := p.Source.(Directory)
					if !ok {
						return nil, fmt.Errorf("Failed to parse source for resolve")
					}
					// Display name is the last path element of the ID.
					return filepath.Base(source.ID), nil
				},
			},
		},
	})
	// File: the scalar fields (size/contentType/etag) go through the
	// "getFile" dataloader via loadFile, so they are fetched lazily and
	// batched across a query.
	graphqlFileType = graphql.NewObject(graphql.ObjectConfig{
		Name:        "File",
		Description: "Represents a file, not a directory",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Type:        graphql.NewNonNull(graphql.ID),
				Description: "The uniqe ID of the file. Represents the path and the s3 key.",
			},
			"name": &graphql.Field{
				Type: graphql.String,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					source, ok := p.Source.(File)
					if !ok {
						return nil, fmt.Errorf("Failed to parse source for resolve")
					}
					return filepath.Base(source.ID), nil
				},
			},
			"size": &graphql.Field{
				Type: graphql.Int,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					file, err := loadFile(p)
					if err != nil {
						return nil, err
					}
					return file.Size, nil
				},
			},
			"contentType": &graphql.Field{
				Type: graphql.String,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					file, err := loadFile(p)
					if err != nil {
						return nil, err
					}
					return file.ContentType, nil
				},
			},
			"etag": &graphql.Field{
				Type: graphql.String,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					file, err := loadFile(p)
					if err != nil {
						return nil, err
					}
					return file.ETag, nil
				},
			},
			"parent": &graphql.Field{
				Type: graphqlDirType,
				Resolve: func(p graphql.ResolveParams) (interface{}, error) {
					source, ok := p.Source.(File)
					if !ok {
						return nil, fmt.Errorf("Failed to parse Source for parent resolve")
					}
					// filepath.Dir strips the file name; "." means the
					// file sits at the bucket root, which is mapped to "/".
					basename := filepath.Dir(source.ID)
					if basename == "." {
						basename = "/"
					}
					return Directory{
						ID: basename,
					}, nil
				},
			},
		},
	})
	// Cyclic fields (Directory -> File, Directory -> Directory) must be
	// added only after both object types exist.
	graphqlDirType.AddFieldConfig("files", &graphql.Field{
		Type: graphql.NewList(graphqlFileType),
		Resolve: func(p graphql.ResolveParams) (interface{}, error) {
			source, ok := p.Source.(Directory)
			if !ok {
				return nil, fmt.Errorf("Failed to parse Source for files resolve")
			}
			loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
			thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(source.ID))
			return thunk()
		},
	})
	graphqlDirType.AddFieldConfig("directorys", &graphql.Field{
		Type: graphql.NewList(graphqlDirType),
		Resolve: func(p graphql.ResolveParams) (interface{}, error) {
			source, ok := p.Source.(Directory)
			if !ok {
				return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
			}
			loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
			thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(source.ID))
			return thunk()
		},
	})
	graphqlDirType.AddFieldConfig("parent", &graphql.Field{
		Type: graphqlDirType,
		Resolve: func(p graphql.ResolveParams) (interface{}, error) {
			source, ok := p.Source.(Directory)
			if !ok {
				return nil, fmt.Errorf("Failed to parse Source for directorys resolve")
			}
			// NOTE(review): assumes IDs end with "/" (e.g. "a/b/" splits
			// to ["a","b",""], dropping the last two elements yields "a/").
			// An ID without any "/" makes len(dirs)-2 negative and this
			// slice expression panics; an ID like "a/b" collapses to "/".
			// TODO confirm the invariant that Directory IDs are always
			// slash-terminated.
			dirs := strings.Split(source.ID, "/")
			return Directory{
				ID: strings.Join(dirs[:len(dirs)-2], "/") + "/",
			}, nil
		},
	})
}
// loadFile resolves the File carried in p.Source into its full metadata
// via the "getFile" dataloader. It is shared by the size, contentType
// and etag field resolvers.
func loadFile(p graphql.ResolveParams) (*File, error) {
	source, ok := p.Source.(File)
	if !ok {
		return nil, fmt.Errorf("Failed to parse source for resolve")
	}
	loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
	thunk := loader["getFile"].Load(p.Context, dataloader.StringKey(source.ID))
	result, err := thunk()
	// BUG FIX: the error was previously inspected only after the type
	// assertion, so any real loader error (e.g. StatObject failure) was
	// masked by the generic "Failed to load file" message.
	if err != nil {
		return nil, err
	}
	file, ok := result.(*File)
	if !ok {
		return nil, fmt.Errorf("Failed to load file")
	}
	return file, nil
}

102
internal/s3Broswer.go Normal file
View File

@@ -0,0 +1,102 @@
package s3browser
import (
"context"
"log"
"net/http"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
"github.com/graphql-go/handler"
"github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials"
)
// AppConfig general config for connecting to the S3 backend.
//
// NOTE(review): S3Endoint and S3Buket are misspelled but exported;
// renaming them would break existing callers, so they are documented
// as-is.
type AppConfig struct {
	// S3Endoint is the S3 server endpoint (host[:port], no scheme).
	S3Endoint string
	// S3AccessKey and S3SecretKey are the static credentials used for
	// signature v4 authentication.
	S3AccessKey string
	S3SecretKey string
	// S3SSL enables TLS for the S3 connection.
	S3SSL bool
	// S3Buket is the bucket whose existence is checked at startup.
	S3Buket string
}
// File represents a file (an S3 object) with its metadata.
type File struct {
	// ID is the full S3 object key; it doubles as the GraphQL ID.
	ID string `json:"id"`
	// Name is the base name (last path element) of the key.
	Name string `json:"name"`
	// Size is the object size in bytes.
	Size int64 `json:"size"`
	// ContentType is the MIME type reported by S3.
	ContentType string `json:"contentType"`
	// ETag is the S3 entity tag of the object.
	ETag string `json:"etag"`
}
// Directory represents a directory (an S3 key prefix) with its metadata.
type Directory struct {
	// ID is the slash-terminated S3 prefix; it doubles as the GraphQL ID.
	ID string `json:"id"`
	// Name is the base name of the prefix.
	Name string `json:"name"`
	// Files and Directorys mirror the GraphQL fields; the resolvers
	// load them lazily via dataloaders rather than from these fields.
	Files      []File      `json:"files"`
	Directorys []Directory `json:"directorys"`
}
// bucketName is the bucket used by the dataloader batch functions.
// NOTE(review): hard-coded to "dev" and never updated from
// AppConfig.S3Buket, even though Start/setupS3Client validate that
// configured bucket — verify these are meant to be the same bucket.
var bucketName = "dev"
// initHttp sets up the /graphql endpoint and starts the HTTP server on
// :8080. It blocks for the lifetime of the server.
//
// The S3 client and the dataloader map are injected into every request
// through the resolver context.
func initHttp(schema graphql.Schema, s3Client *minio.Client, loaderMap map[string]*dataloader.Loader) {
	h := handler.New(&handler.Config{
		Schema:     &schema,
		Pretty:     true,
		GraphiQL:   false,
		Playground: true,
	})
	// NOTE(review): plain string context keys trigger go vet warnings
	// and can collide; a private key type would be safer, but every
	// resolver reads these exact string keys, so they stay unchanged.
	resolveContext := context.WithValue(context.Background(), "s3Client", s3Client)
	resolveContext = context.WithValue(resolveContext, "loader", loaderMap)
	http.HandleFunc("/graphql", func(rw http.ResponseWriter, r *http.Request) {
		h.ContextHandler(resolveContext, rw, r)
	})
	// BUG FIX: the ListenAndServe error (e.g. port already in use) was
	// silently discarded; the process now exits with a logged error.
	log.Fatal(http.ListenAndServe(":8080", nil))
}
// setupS3Client builds the minio client from config and verifies the
// configured bucket exists. Any failure is fatal: the app cannot work
// without a reachable bucket.
func setupS3Client(config AppConfig) *minio.Client {
	minioClient, err := minio.New(config.S3Endoint, &minio.Options{
		Creds:  credentials.NewStaticV4(config.S3AccessKey, config.S3SecretKey, ""),
		Secure: config.S3SSL,
	})
	if err != nil {
		log.Fatalln(err)
	}
	exists, err := minioClient.BucketExists(context.Background(), config.S3Buket)
	if err != nil {
		log.Fatalln(err)
	}
	if !exists {
		log.Fatalf("Bucket '%s' does not exist", config.S3Buket)
	}
	// IDIOM: dropped the redundant else branch — log.Fatalf above
	// terminates the process, so the happy path stays left-aligned.
	log.Print("S3 client connected")
	return minioClient
}
// Start boots the application: it connects the S3 client, builds the
// dataloaders and GraphQL types, assembles the schema, and finally
// serves HTTP. It blocks for the lifetime of the server.
func Start(config AppConfig) {
	client := setupS3Client(config)
	loaders := createDataloader()
	// Types must exist before the schema references them.
	graphqlTypes()
	schema, err := graphqlSchema()
	if err != nil {
		log.Panic(err)
	}
	initHttp(schema, client, loaders)
}

80
internal/schema.go Normal file
View File

@@ -0,0 +1,80 @@
package s3browser
import (
"fmt"
"github.com/graph-gophers/dataloader"
"github.com/graphql-go/graphql"
)
// graphqlSchema builds the schema with its root query. The "files" and
// "directorys" fields resolve through the dataloaders stored in the
// request context under the "loader" key; "file" just wraps the id in a
// File value whose metadata fields load lazily.
func graphqlSchema() (graphql.Schema, error) {
	fields := graphql.Fields{
		"files": &graphql.Field{
			Type: graphql.NewNonNull(graphql.NewList(graphqlFileType)),
			Args: graphql.FieldConfigArgument{
				"path": &graphql.ArgumentConfig{
					Type: graphql.NewNonNull(graphql.String),
				},
			},
			Resolve: func(p graphql.ResolveParams) (interface{}, error) {
				path, ok := p.Args["path"].(string)
				if !ok {
					// CONSISTENCY FIX: previously returned (nil, nil),
					// silently yielding null for a NonNull list; report
					// the failure like the "file" resolver does.
					return nil, fmt.Errorf("Failed to parse args")
				}
				loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
				thunk := loader["getFiles"].Load(p.Context, dataloader.StringKey(path))
				return thunk()
			},
		},
		"directorys": &graphql.Field{
			Type: graphql.NewNonNull(graphql.NewList(graphqlDirType)),
			Args: graphql.FieldConfigArgument{
				"path": &graphql.ArgumentConfig{
					Type: graphql.NewNonNull(graphql.String),
				},
			},
			Resolve: func(p graphql.ResolveParams) (interface{}, error) {
				path, ok := p.Args["path"].(string)
				if !ok {
					// CONSISTENCY FIX: see "files" above.
					return nil, fmt.Errorf("Failed to parse args")
				}
				loader := p.Context.Value("loader").(map[string]*dataloader.Loader)
				thunk := loader["getDirs"].Load(p.Context, dataloader.StringKey(path))
				return thunk()
			},
		},
		"file": &graphql.Field{
			Type: graphqlFileType,
			Args: graphql.FieldConfigArgument{
				"id": &graphql.ArgumentConfig{
					Type: graphql.NewNonNull(graphql.ID),
				},
			},
			Resolve: func(p graphql.ResolveParams) (interface{}, error) {
				id, ok := p.Args["id"].(string)
				if !ok {
					return nil, fmt.Errorf("Failed to parse args")
				}
				// Only the ID is filled in; the field resolvers on File
				// fetch metadata on demand via the "getFile" loader.
				return File{
					ID: id,
				}, nil
			},
		},
	}
	rootQuery := graphql.ObjectConfig{
		Name:   "RootQuery",
		Fields: fields,
	}
	schemaConfig := graphql.SchemaConfig{
		Query: graphql.NewObject(rootQuery),
	}
	return graphql.NewSchema(schemaConfig)
}