From 48f770f703a1e550be8b11c85dcbe5b01bb75b98 Mon Sep 17 00:00:00 2001
From: Djeeberjr
Date: Sat, 27 Nov 2021 04:07:51 +0100
Subject: [PATCH] added loader tests

---
 internal/loader/loader_test.go | 107 +++++++++++++++++++++++++++++++++
 1 file changed, 107 insertions(+)
 create mode 100644 internal/loader/loader_test.go

diff --git a/internal/loader/loader_test.go b/internal/loader/loader_test.go
new file mode 100644
index 0000000..8d8f5e9
--- /dev/null
+++ b/internal/loader/loader_test.go
@@ -0,0 +1,107 @@
+package loader_test
+
+import (
+	"context"
+	"strings"
+	"testing"
+
+	"git.kapelle.org/niklas/s3browser/internal/loader"
+	"git.kapelle.org/niklas/s3browser/internal/s3"
+	"git.kapelle.org/niklas/s3browser/internal/types"
+	"github.com/graph-gophers/dataloader"
+	"github.com/stretchr/testify/assert"
+)
+
+// setup creates a mock S3 client with test data and a Loader with caching disabled.
+func setup(t *testing.T) (context.Context, *loader.Loader, *assert.Assertions) {
+	assert := assert.New(t)
+	s3, _ := s3.NewMockS3([]string{"bucket1", "bucket2"})
+	loader := loader.NewLoader(loader.CacheConfig{
+		ListObjectsLoaderCache:          &dataloader.NoCache{},
+		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
+		StatObjectLoaderCache:           &dataloader.NoCache{},
+		ListBucketsLoaderCache:          &dataloader.NoCache{},
+	})
+
+	fillS3(s3)
+	ctx := context.WithValue(context.Background(), "s3Client", s3)
+
+	return ctx, loader, assert
+}
+
+// fillS3 uploads a fixed set of objects so the tests run against a known bucket layout.
+func fillS3(s3 s3.S3Service) {
+	ctx := context.Background()
+	length := int64(len("content"))
+
+	for _, v := range []string{
+		"bucket1:/file1", "bucket1:/file2", "bucket1:/dir1/file1",
+		"bucket1:/dir1/file2", "bucket1:/dir2/file1", "bucket1:/dir1/sub1/file1",
+		"bucket1:/dir1/sub1/file2",
+	} {
+		s3.PutObject(ctx, *types.ParseID(v), strings.NewReader("content"), length)
+	}
+}
+
+func TestCreateLoader(t *testing.T) {
+	assert := assert.New(t)
+	loader := loader.NewLoader(loader.CacheConfig{
+		ListObjectsLoaderCache:          &dataloader.NoCache{},
+		ListObjectsRecursiveLoaderCache: &dataloader.NoCache{},
+		StatObjectLoaderCache:           &dataloader.NoCache{},
+		ListBucketsLoaderCache:          &dataloader.NoCache{},
+	})
+
+	assert.NotNil(loader)
+}
+
+func TestGetBuckets(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	buckets, err := loader.GetBuckets(ctx)
+	assert.NoError(err)
+	assert.Len(buckets, 2)
+	assert.Contains(buckets, "bucket1")
+	assert.Contains(buckets, "bucket2")
+}
+
+func TestGetFile(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	file, err := loader.GetFile(ctx, *types.ParseID("bucket1:/dir1/file1"))
+	assert.NoError(err)
+
+	assert.Equal("bucket1:/dir1/file1", file.ID.String())
+	assert.Equal("file1", file.Name)
+	assert.Equal(int64(len("content")), file.Size)
+}
+
+func TestGetFiles(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/")
+
+	files, err := loader.GetFiles(ctx, *id)
+	assert.NoError(err)
+	assert.Len(files, 2)
+}
+
+func TestGetDir(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/")
+
+	dirs, err := loader.GetDirs(ctx, *id)
+	assert.NoError(err)
+	assert.Len(dirs, 2)
+}
+
+func TestGetFilesRecursive(t *testing.T) {
+	ctx, loader, assert := setup(t)
+
+	id := types.ParseID("bucket1:/dir1/")
+
+	files, err := loader.GetFilesRecursive(ctx, *id)
+	assert.NoError(err)
+	assert.Len(files, 4)
+}