Compare commits


3 Commits

SHA1        Message                   Date
48f770f703  added loader tests        2021-11-27 04:07:51 +01:00
2ae14cdfd4  fixed mock s3 list dirs   2021-11-27 04:07:41 +01:00
a10593a318  loader cache config       2021-11-27 04:07:27 +01:00
4 changed files with 141 additions and 6 deletions

View File

@@ -23,11 +23,18 @@ type Loader struct {
 	listBucketsLoaderCache cache.S3Cache
 }
 
-func NewLoader(config types.AppConfig) *Loader {
-	listObjectsLoaderCache := &dataloader.NoCache{}
-	listObjectsRecursiveLoaderCache := &dataloader.NoCache{}
-	statObjectLoaderCache := cache.NewTTLCache(config.CacheTTL, config.CacheCleanup)
-	listBucketsLoaderCache := cache.NewTTLCache(config.CacheTTL, config.CacheCleanup)
+type CacheConfig struct {
+	ListObjectsLoaderCache          cache.S3Cache
+	ListObjectsRecursiveLoaderCache cache.S3Cache
+	StatObjectLoaderCache           cache.S3Cache
+	ListBucketsLoaderCache          cache.S3Cache
+}
+
+func NewLoader(cacheConfig CacheConfig) *Loader {
+	listObjectsLoaderCache := cacheConfig.ListObjectsLoaderCache
+	listObjectsRecursiveLoaderCache := cacheConfig.ListObjectsRecursiveLoaderCache
+	statObjectLoaderCache := cacheConfig.StatObjectLoaderCache
+	listBucketsLoaderCache := cacheConfig.ListBucketsLoaderCache
 
 	return &Loader{
 		listObjectsLoader: dataloader.NewBatchedLoader(
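
This hunk inverts the cache dependency: NewLoader no longer reads TTL settings out of types.AppConfig but takes one cache per dataloader, so callers decide the caching policy. A minimal sketch of the two wirings this enables; CacheConfig, NewTTLCache, and dataloader.NoCache come from the diff above, while the package and helper names are illustrative assumptions:

package wiring

import (
	"git.kapelle.org/niklas/s3browser/internal/cache"
	"git.kapelle.org/niklas/s3browser/internal/loader"
	"git.kapelle.org/niklas/s3browser/internal/types"
	"github.com/graph-gophers/dataloader"
)

// newUncachedLoader mirrors the new tests: every loader gets a NoCache,
// so each call goes straight through to the (mock) S3 backend.
func newUncachedLoader() *loader.Loader {
	return loader.NewLoader(loader.CacheConfig{
		ListObjectsLoaderCache:          &dataloader.NoCache{},
		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
		StatObjectLoaderCache:           &dataloader.NoCache{},
		ListBucketsLoaderCache:          &dataloader.NoCache{},
	})
}

// newDefaultLoader reproduces what the removed lines hard-coded: TTL
// caches for stat and bucket listing, no caching for object listings.
func newDefaultLoader(config types.AppConfig) *loader.Loader {
	return loader.NewLoader(loader.CacheConfig{
		ListObjectsLoaderCache:          &dataloader.NoCache{},
		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
		StatObjectLoaderCache:           cache.NewTTLCache(config.CacheTTL, config.CacheCleanup),
		ListBucketsLoaderCache:          cache.NewTTLCache(config.CacheTTL, config.CacheCleanup),
	})
}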

View File

@@ -0,0 +1,105 @@
+package loader_test
+
+import (
+	"context"
+	"strings"
+	"testing"
+
+	"git.kapelle.org/niklas/s3browser/internal/loader"
+	"git.kapelle.org/niklas/s3browser/internal/s3"
+	"git.kapelle.org/niklas/s3browser/internal/types"
+	"github.com/graph-gophers/dataloader"
+	"github.com/stretchr/testify/assert"
+)
+
+func setup(t *testing.T) (context.Context, *loader.Loader, *assert.Assertions) {
+	assert := assert.New(t)
+
+	s3, _ := s3.NewMockS3([]string{"bucket1", "bucket2"})
+
+	loader := loader.NewLoader(loader.CacheConfig{
+		ListObjectsLoaderCache:          &dataloader.NoCache{},
+		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
+		StatObjectLoaderCache:           &dataloader.NoCache{},
+		ListBucketsLoaderCache:          &dataloader.NoCache{},
+	})
+
+	fillS3(s3)
+
+	ctx := context.WithValue(context.Background(), "s3Client", s3)
+
+	return ctx, loader, assert
+}
+
+func fillS3(s3 s3.S3Service) {
+	ctx := context.Background()
+	length := int64(len("content"))
+	for _, v := range []string{
+		"bucket1:/file1",
+		"bucket1:/file2",
+		"bucket1:/dir1/file1",
+		"bucket1:/dir1/file2",
+		"bucket1:/dir2/file1",
+		"bucket1:/dir1/sub1/file1",
+		"bucket1:/dir1/sub1/file2",
+	} {
+		s3.PutObject(ctx, *types.ParseID(v), strings.NewReader("content"), length)
+	}
+}
+
+func TestCreateLoader(t *testing.T) {
+	assert := assert.New(t)
+
+	loader := loader.NewLoader(loader.CacheConfig{
+		ListObjectsLoaderCache:          &dataloader.NoCache{},
+		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
+		StatObjectLoaderCache:           &dataloader.NoCache{},
+		ListBucketsLoaderCache:          &dataloader.NoCache{},
+	})
+
+	assert.NotNil(loader)
+}
+
+func TestGetBuckets(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	buckets, err := loader.GetBuckets(ctx)
+
+	assert.NoError(err)
+	assert.Len(buckets, 2)
+	assert.Contains(buckets, "bucket1")
+	assert.Contains(buckets, "bucket2")
+}
+
+func TestGetFile(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	file, err := loader.GetFile(ctx, *types.ParseID("bucket1:/dir1/file1"))
+
+	assert.NoError(err)
+	assert.Equal("bucket1:/dir1/file1", file.ID.String())
+	assert.Equal("file1", file.Name)
+	assert.Equal(int64(len("content")), file.Size)
+}
+
+func TestGetFiles(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/")
+	files, err := loader.GetFiles(ctx, *id)
+
+	assert.NoError(err)
+	assert.Len(files, 2)
+}
+
+func TestGetDir(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/")
+	dirs, err := loader.GetDirs(ctx, *id)
+
+	assert.NoError(err)
+	assert.Len(dirs, 2)
+}
+
+func TestGetFilesRecursive(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/dir1/")
+	files, err := loader.GetFilesRecursive(ctx, *id)
+
+	assert.NoError(err)
+	assert.Len(files, 4)
+}
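
The last test pins GetFilesRecursive to one prefix; with the objects seeded in fillS3, the same setup supports a table-driven variant covering every prefix depth. A sketch that could sit alongside the tests above; the expected counts follow from fillS3, and the test name is an assumption:

func TestGetFilesRecursiveAllPrefixes(t *testing.T) {
	ctx, l, assert := setup(t)

	for _, tc := range []struct {
		id   string
		want int
	}{
		{"bucket1:/", 7},           // every object in the bucket
		{"bucket1:/dir1/", 4},      // file1, file2, sub1/file1, sub1/file2
		{"bucket1:/dir1/sub1/", 2}, // file1, file2
	} {
		files, err := l.GetFilesRecursive(ctx, *types.ParseID(tc.id))
		assert.NoError(err)
		assert.Len(files, tc.want, tc.id)
	}
}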

View File

@@ -47,14 +47,30 @@ func (m *mockS3) ListBuckets(ctx context.Context) ([]string, error) {
 func (m *mockS3) ListObjects(ctx context.Context, id types.ID) ([]Object, error) {
 	var results []Object
+	dirs := make(map[string]bool)
+	depth := len(strings.Split(id.Key, "/"))
 	for k, v := range m.objects {
 		if k.Bucket == id.Bucket {
 			if k.Parent().Key == id.Key {
 				results = append(results, *mockObjToObject(v, k))
+			} else if strings.HasPrefix(k.Key, id.Key) {
+				s := strings.Join(strings.Split(k.Key, "/")[:depth], "/") + "/"
+				dirs[s] = true
 			}
 		}
 	}
+
+	for k := range dirs {
+		results = append(results, Object{
+			ID: types.ID{
+				Bucket: id.Bucket,
+				Key:    k,
+			},
+		})
+	}
+
 	return results, nil
 }
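
Since S3 has no real directories, the fixed mock synthesizes them: any key under the requested prefix that is not a direct child contributes one directory entry, truncated to the prefix depth and deduplicated through the dirs map. The same logic in isolation, on plain strings; a standalone sketch, not repo code:

package main

import (
	"fmt"
	"strings"
)

// listDirs mimics the mock's directory synthesis: one entry per distinct
// path segment directly below the prefix.
func listDirs(keys []string, prefix string) map[string]bool {
	depth := len(strings.Split(prefix, "/"))
	dirs := make(map[string]bool)
	for _, k := range keys {
		if !strings.HasPrefix(k, prefix) {
			continue
		}
		// Direct children have exactly `depth` segments and are returned
		// as objects, not directories; deeper keys imply a directory.
		if parts := strings.Split(k, "/"); len(parts) > depth {
			dirs[strings.Join(parts[:depth], "/")+"/"] = true
		}
	}
	return dirs
}

func main() {
	keys := []string{"/file1", "/dir1/file1", "/dir1/sub1/file1", "/dir2/file1"}
	fmt.Println(listDirs(keys, "/")) // map[/dir1/:true /dir2/:true]
}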

View File

@@ -3,8 +3,10 @@ package s3browser
 import (
 	"context"
 
+	"github.com/graph-gophers/dataloader"
 	log "github.com/sirupsen/logrus"
 
+	"git.kapelle.org/niklas/s3browser/internal/cache"
 	"git.kapelle.org/niklas/s3browser/internal/db"
 	gql "git.kapelle.org/niklas/s3browser/internal/gql"
 	httpserver "git.kapelle.org/niklas/s3browser/internal/httpserver"
@@ -33,7 +35,12 @@ func Start(config types.AppConfig) {
 		log.Error("Failed to connect DB: ", err.Error())
 	}
 
-	loader := loader.NewLoader(config)
+	loader := loader.NewLoader(loader.CacheConfig{
+		ListObjectsLoaderCache:          &dataloader.NoCache{},
+		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
+		StatObjectLoaderCache:           cache.NewTTLCache(config.CacheTTL, config.CacheCleanup),
+		ListBucketsLoaderCache:          cache.NewTTLCache(config.CacheTTL, config.CacheCleanup),
+	})
 
 	gql.GraphqlTypes()
 	schema, err := gql.GraphqlSchema()
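
Net effect: cache construction moves to the composition root. Start wires the exact caches the old NewLoader hard-coded (TTL caches for stat and bucket listing, NoCache for object listings), so production behaviour is unchanged, while the new tests inject NoCache everywhere and run deterministically against the mock S3.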