| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | /* | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  |  * MinIO Cloud Storage, (C) 2018,2019 MinIO, Inc. | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  |  * | 
					
						
							|  |  |  |  * Licensed under the Apache License, Version 2.0 (the "License"); | 
					
						
							|  |  |  |  * you may not use this file except in compliance with the License. | 
					
						
							|  |  |  |  * You may obtain a copy of the License at | 
					
						
							|  |  |  |  * | 
					
						
							|  |  |  |  *     http://www.apache.org/licenses/LICENSE-2.0
 | 
					
						
							|  |  |  |  * | 
					
						
							|  |  |  |  * Unless required by applicable law or agreed to in writing, software | 
					
						
							|  |  |  |  * distributed under the License is distributed on an "AS IS" BASIS, | 
					
						
							|  |  |  |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 
					
						
							|  |  |  |  * See the License for the specific language governing permissions and | 
					
						
							|  |  |  |  * limitations under the License. | 
					
						
							|  |  |  |  */ | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | package cmd | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | import ( | 
					
						
							|  |  |  | 	"bytes" | 
					
						
							|  |  |  | 	"context" | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	"io" | 
					
						
							|  |  |  | 	"net/http" | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	"testing" | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	"github.com/minio/minio/pkg/hash" | 
					
						
							|  |  |  | ) | 
					
						
							|  |  |  | 
 | 
					
						
// initCacheObjects initializes a single disk cache rooted at disk, with the
// given max-use percentage, cache-after access count, and low/high disk-usage
// watermarks. Thin test helper over newDiskCache.
func initCacheObjects(disk string, cacheMaxUse, cacheAfter, cacheWatermarkLow, cacheWatermarkHigh int) (*diskCache, error) {
	return newDiskCache(disk, cacheMaxUse, cacheAfter, cacheWatermarkLow, cacheWatermarkHigh)
}
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | // inits diskCache struct for nDisks
 | 
					
						
							| 
									
										
										
										
											2020-02-23 21:33:39 +08:00
										 |  |  | func initDiskCaches(drives []string, cacheMaxUse, cacheAfter, cacheWatermarkLow, cacheWatermarkHigh int, t *testing.T) ([]*diskCache, error) { | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	var cb []*diskCache | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	for _, d := range drives { | 
					
						
							| 
									
										
										
										
											2020-02-23 21:33:39 +08:00
										 |  |  | 		obj, err := initCacheObjects(d, cacheMaxUse, cacheAfter, cacheWatermarkLow, cacheWatermarkHigh) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 		if err != nil { | 
					
						
							|  |  |  | 			return nil, err | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		cb = append(cb, obj) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	return cb, nil | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | // Tests ToObjectInfo function.
 | 
					
						
							|  |  |  | func TestCacheMetadataObjInfo(t *testing.T) { | 
					
						
							|  |  |  | 	m := cacheMeta{Meta: nil} | 
					
						
							|  |  |  | 	objInfo := m.ToObjectInfo("testbucket", "testobject") | 
					
						
							|  |  |  | 	if objInfo.Size != 0 { | 
					
						
							|  |  |  | 		t.Fatal("Unexpected object info value for Size", objInfo.Size) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if objInfo.ModTime != timeSentinel { | 
					
						
							|  |  |  | 		t.Fatal("Unexpected object info value for ModTime ", objInfo.ModTime) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if objInfo.IsDir { | 
					
						
							|  |  |  | 		t.Fatal("Unexpected object info value for IsDir", objInfo.IsDir) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if !objInfo.Expires.IsZero() { | 
					
						
							|  |  |  | 		t.Fatal("Unexpected object info value for Expires ", objInfo.Expires) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | // test whether a drive being offline causes
 | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | // getCachedLoc to fetch next online drive
 | 
					
						
							|  |  |  | func TestGetCachedLoc(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	for n := 1; n < 10; n++ { | 
					
						
							|  |  |  | 		fsDirs, err := getRandomDisks(n) | 
					
						
							|  |  |  | 		if err != nil { | 
					
						
							|  |  |  | 			t.Fatal(err) | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2020-02-23 21:33:39 +08:00
										 |  |  | 		d, err := initDiskCaches(fsDirs, 100, 1, 80, 90, t) | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
										 |  |  | 		if err != nil { | 
					
						
							|  |  |  | 			t.Fatal(err) | 
					
						
							|  |  |  | 		} | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		c := cacheObjects{cache: d} | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
										 |  |  | 		bucketName := "testbucket" | 
					
						
							|  |  |  | 		objectName := "testobject" | 
					
						
							|  |  |  | 		// find cache drive where object would be hashed
 | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		index := c.hashIndex(bucketName, objectName) | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
										 |  |  | 		// turn off drive by setting online status to false
 | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		c.cache[index].online = false | 
					
						
							| 
									
										
										
										
											2019-10-16 09:35:41 +08:00
										 |  |  | 		cfs, err := c.getCacheLoc(bucketName, objectName) | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
										 |  |  | 		if n == 1 && err == errDiskNotFound { | 
					
						
							|  |  |  | 			continue | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		if err != nil { | 
					
						
							|  |  |  | 			t.Fatal(err) | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		i := -1 | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		for j, f := range c.cache { | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
										 |  |  | 			if f == cfs { | 
					
						
							|  |  |  | 				i = j | 
					
						
							|  |  |  | 				break | 
					
						
							|  |  |  | 			} | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 		if i != (index+1)%n { | 
					
						
							|  |  |  | 			t.Fatalf("expected next cache location to be picked") | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
// TestGetCacheMaxUse is the same offline-drive fallback scenario as
// TestGetCachedLoc, but with the caches configured with a max-use limit of
// 80% (instead of 100%): an offline drive should still cause getCacheLoc to
// pick the next online drive in the ring.
func TestGetCacheMaxUse(t *testing.T) {
	for n := 1; n < 10; n++ {
		fsDirs, err := getRandomDisks(n)
		if err != nil {
			t.Fatal(err)
		}
		d, err := initDiskCaches(fsDirs, 80, 1, 80, 90, t)
		if err != nil {
			t.Fatal(err)
		}
		c := cacheObjects{cache: d}

		bucketName := "testbucket"
		objectName := "testobject"
		// find cache drive where object would be hashed
		index := c.hashIndex(bucketName, objectName)
		// turn off drive by setting online status to false
		c.cache[index].online = false
		cb, err := c.getCacheLoc(bucketName, objectName)
		if n == 1 && err == errDiskNotFound {
			// only one drive and it is offline: no fallback possible
			continue
		}
		if err != nil {
			t.Fatal(err)
		}
		// find the position of the picked drive within the ring
		i := -1
		for j, f := range d {
			if f == cb {
				i = j
				break
			}
		}
		if i != (index+1)%n {
			t.Fatalf("expected next cache location to be picked")
		}
	}
}
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | // test wildcard patterns for excluding entries from cache
 | 
					
						
							|  |  |  | func TestCacheExclusion(t *testing.T) { | 
					
						
							| 
									
										
										
										
											2019-07-11 06:41:11 +08:00
										 |  |  | 	cobjects := &cacheObjects{ | 
					
						
							|  |  |  | 		cache: nil, | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	} | 
					
						
							| 
									
										
										
										
											2019-07-11 06:41:11 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	testCases := []struct { | 
					
						
							|  |  |  | 		bucketName     string | 
					
						
							|  |  |  | 		objectName     string | 
					
						
							|  |  |  | 		excludePattern string | 
					
						
							|  |  |  | 		expectedResult bool | 
					
						
							|  |  |  | 	}{ | 
					
						
							|  |  |  | 		{"testbucket", "testobjectmatch", "testbucket/testobj*", true}, | 
					
						
							|  |  |  | 		{"testbucket", "testobjectnomatch", "testbucet/testobject*", false}, | 
					
						
							|  |  |  | 		{"testbucket", "testobject/pref1/obj1", "*/*", true}, | 
					
						
							|  |  |  | 		{"testbucket", "testobject/pref1/obj1", "*/pref1/*", true}, | 
					
						
							|  |  |  | 		{"testbucket", "testobject/pref1/obj1", "testobject/*", false}, | 
					
						
							|  |  |  | 		{"photos", "image1.jpg", "*.jpg", true}, | 
					
						
							|  |  |  | 		{"photos", "europe/paris/seine.jpg", "seine.jpg", false}, | 
					
						
							|  |  |  | 		{"photos", "europe/paris/seine.jpg", "*/seine.jpg", true}, | 
					
						
							|  |  |  | 		{"phil", "z/likes/coffee", "*/likes/*", true}, | 
					
						
							|  |  |  | 		{"failbucket", "no/slash/prefixes", "/failbucket/no/", false}, | 
					
						
							|  |  |  | 		{"failbucket", "no/slash/prefixes", "/failbucket/no/*", false}, | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	for i, testCase := range testCases { | 
					
						
							| 
									
										
										
										
											2019-07-11 06:41:11 +08:00
										 |  |  | 		cobjects.exclude = []string{testCase.excludePattern} | 
					
						
							|  |  |  | 		if cobjects.isCacheExclude(testCase.bucketName, testCase.objectName) != testCase.expectedResult { | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 			t.Fatal("Cache exclusion test failed for case ", i) | 
					
						
							|  |  |  | 		} | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | // Test diskCache.
 | 
					
						
							|  |  |  | func TestDiskCache(t *testing.T) { | 
					
						
							|  |  |  | 	fsDirs, err := getRandomDisks(1) | 
					
						
							|  |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-02-23 21:33:39 +08:00
										 |  |  | 	d, err := initDiskCaches(fsDirs, 100, 0, 80, 90, t) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	c := cacheObjects{cache: d} | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	cache := c.cache[0] | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	ctx := context.Background() | 
					
						
							|  |  |  | 	bucketName := "testbucket" | 
					
						
							|  |  |  | 	objectName := "testobject" | 
					
						
							|  |  |  | 	content := "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | 
					
						
							|  |  |  | 	etag := "061208c10af71a30c6dcd6cf5d89f0fe" | 
					
						
							|  |  |  | 	contentType := "application/zip" | 
					
						
							|  |  |  | 	size := len(content) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	httpMeta := make(map[string]string) | 
					
						
							|  |  |  | 	httpMeta["etag"] = etag | 
					
						
							|  |  |  | 	httpMeta["content-type"] = contentType | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	objInfo := ObjectInfo{} | 
					
						
							|  |  |  | 	objInfo.Bucket = bucketName | 
					
						
							|  |  |  | 	objInfo.Name = objectName | 
					
						
							|  |  |  | 	objInfo.Size = int64(size) | 
					
						
							|  |  |  | 	objInfo.ContentType = contentType | 
					
						
							|  |  |  | 	objInfo.ETag = etag | 
					
						
							|  |  |  | 	objInfo.UserDefined = httpMeta | 
					
						
							| 
									
										
										
										
											2019-02-09 13:31:06 +08:00
										 |  |  | 	var opts ObjectOptions | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	byteReader := bytes.NewReader([]byte(content)) | 
					
						
							| 
									
										
										
										
											2019-05-09 09:35:40 +08:00
										 |  |  | 	hashReader, err := hash.NewReader(byteReader, int64(size), "", "", int64(size), globalCLIContext.StrictS3Compat) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-02-04 11:40:01 +08:00
										 |  |  | 	err = cache.Put(ctx, bucketName, objectName, hashReader, hashReader.Size(), nil, ObjectOptions{UserDefined: httpMeta}, false) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2020-02-04 11:40:01 +08:00
										 |  |  | 	cReader, _, err := cache.Get(ctx, bucketName, objectName, nil, http.Header{ | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 		"Content-Type": []string{"application/json"}, | 
					
						
							|  |  |  | 	}, opts) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	cachedObjInfo := cReader.ObjInfo | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if !cache.Exists(ctx, bucketName, objectName) { | 
					
						
							|  |  |  | 		t.Fatal("Expected object to exist on cache") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if cachedObjInfo.ETag != objInfo.ETag { | 
					
						
							|  |  |  | 		t.Fatal("Expected ETag to match") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if cachedObjInfo.Size != objInfo.Size { | 
					
						
							|  |  |  | 		t.Fatal("Size mismatch") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if cachedObjInfo.ContentType != objInfo.ContentType { | 
					
						
							|  |  |  | 		t.Fatal("Cached content-type does not match") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	writer := bytes.NewBuffer(nil) | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	_, err = io.Copy(writer, cReader) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	if err != nil { | 
					
						
							|  |  |  | 		t.Fatal(err) | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | 	if ccontent := writer.Bytes(); !bytes.Equal([]byte(content), ccontent) { | 
					
						
							|  |  |  | 		t.Errorf("wrong cached file content") | 
					
						
							|  |  |  | 	} | 
					
						
							| 
									
										
										
										
											2019-08-10 08:09:08 +08:00
										 |  |  | 	cReader.Close() | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | 	cache.Delete(ctx, bucketName, objectName) | 
					
						
							| 
									
										
										
										
											2018-03-29 05:14:06 +08:00
										 |  |  | 	online := cache.IsOnline() | 
					
						
							|  |  |  | 	if !online { | 
					
						
							|  |  |  | 		t.Errorf("expected cache drive to be online") | 
					
						
							|  |  |  | 	} | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2018-06-26 01:24:12 +08:00
// Test diskCache with upper bound on max cache use.
// The cache is created with cacheMaxUse=80 (percent); depending on how full
// the backing disk already is, either the Put must be rejected with
// errDiskFull, or the full Put/Get round-trip must succeed.
func TestDiskCacheMaxUse(t *testing.T) {
	fsDirs, err := getRandomDisks(1)
	if err != nil {
		t.Fatal(err)
	}
	d, err := initDiskCaches(fsDirs, 80, 0, 80, 90, t)
	if err != nil {
		t.Fatal(err)
	}
	cache := d[0]
	ctx := context.Background()
	bucketName := "testbucket"
	objectName := "testobject"
	content := "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
	etag := "061208c10af71a30c6dcd6cf5d89f0fe"
	contentType := "application/zip"
	size := len(content)

	httpMeta := make(map[string]string)
	httpMeta["etag"] = etag
	httpMeta["content-type"] = contentType

	// Expected metadata for the cached copy.
	objInfo := ObjectInfo{}
	objInfo.Bucket = bucketName
	objInfo.Name = objectName
	objInfo.Size = int64(size)
	objInfo.ContentType = contentType
	objInfo.ETag = etag
	objInfo.UserDefined = httpMeta
	opts := ObjectOptions{}

	byteReader := bytes.NewReader([]byte(content))
	hashReader, err := hash.NewReader(byteReader, int64(size), "", "", int64(size), globalCLIContext.StrictS3Compat)
	if err != nil {
		t.Fatal(err)
	}
	if !cache.diskAvailable(int64(size)) {
		// Disk already past the max-use threshold: Put must refuse
		// with errDiskFull rather than exceed the quota.
		err = cache.Put(ctx, bucketName, objectName, hashReader, hashReader.Size(), nil, ObjectOptions{UserDefined: httpMeta}, false)
		if err != errDiskFull {
			t.Fatal("Cache max-use limit violated.")
		}
	} else {
		// Within quota: the object should cache and read back intact.
		err = cache.Put(ctx, bucketName, objectName, hashReader, hashReader.Size(), nil, ObjectOptions{UserDefined: httpMeta}, false)
		if err != nil {
			t.Fatal(err)
		}
		cReader, _, err := cache.Get(ctx, bucketName, objectName, nil, nil, opts)
		if err != nil {
			t.Fatal(err)
		}
		cachedObjInfo := cReader.ObjInfo
		if !cache.Exists(ctx, bucketName, objectName) {
			t.Fatal("Expected object to exist on cache")
		}
		if cachedObjInfo.ETag != objInfo.ETag {
			t.Fatal("Expected ETag to match")
		}
		if cachedObjInfo.Size != objInfo.Size {
			t.Fatal("Size mismatch")
		}
		if cachedObjInfo.ContentType != objInfo.ContentType {
			t.Fatal("Cached content-type does not match")
		}
		// Drain the cached reader and compare bytes with the original.
		writer := bytes.NewBuffer(nil)
		_, err = io.Copy(writer, cReader)
		if err != nil {
			t.Fatal(err)
		}
		if ccontent := writer.Bytes(); !bytes.Equal([]byte(content), ccontent) {
			t.Errorf("wrong cached file content")
		}
		cReader.Close()

		cache.Delete(ctx, bucketName, objectName)
		online := cache.IsOnline()
		if !online {
			t.Errorf("expected cache drive to be online")
		}
	}
}