Commit aea76ee
Bug fix: Avoid race condition by deduplicating entries in bucket stores user scan (cortexproject#6863)
* Make scanUser always return unique user IDs to avoid concurrent map write
* Changelog
* Instead of returning error, just deduplicate
* changelog

Signed-off-by: Justin Jung <jungjust@amazon.com>
1 parent ee87d7e commit aea76ee
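
For context, a minimal, hypothetical sketch (not the Cortex code) of the failure mode the commit message describes: when the scanned user list contains the same tenant twice, two sync workers can end up mutating that tenant's unsynchronized state at the same time, which Go reports as a concurrent map write. The `tenantState` type, `syncTenant` helper, and worker loop below are illustrative stand-ins.

```go
// Hypothetical illustration of the failure mode described in the commit
// message, not the Cortex implementation: per-tenant state that is only
// safe for a single writer gets mutated by two workers when the scanned
// user list contains the same tenant twice.
package main

import "sync"

type tenantState struct {
	blocks map[string]struct{} // unsynchronized: assumes one writer per tenant
}

func syncTenant(s *tenantState, blockID string) {
	s.blocks[blockID] = struct{}{} // concurrent calls for the same tenant race here
}

func main() {
	state := map[string]*tenantState{
		"user-1": {blocks: map[string]struct{}{}},
		"user-2": {blocks: map[string]struct{}{}},
	}

	// Duplicate entry, as could happen when a tenant shows up in both the
	// active and deleting lists before this fix.
	users := []string{"user-1", "user-2", "user-1"}

	var wg sync.WaitGroup
	for _, u := range users {
		wg.Add(1)
		go func(userID string) {
			defer wg.Done()
			// Both goroutines for "user-1" write the same map concurrently,
			// which the race detector flags and the runtime may abort with
			// "fatal error: concurrent map writes".
			syncTenant(state[userID], "block-1")
		}(u)
	}
	wg.Wait()
}
```

Returning unique user IDs from scanUsers, as this commit does, guarantees at most one worker per tenant and so avoids the concurrent map write.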

File tree

3 files changed: +56 -0 lines

* CHANGELOG.md
* pkg/storegateway/bucket_stores.go
* pkg/storegateway/bucket_stores_test.go


CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -72,6 +72,7 @@
 * [BUGFIX] Ruler: Prevent counting 2xx and 4XX responses as failed writes. #6785
 * [BUGFIX] Ingester: Allow shipper to skip corrupted blocks. #6786
 * [BUGFIX] Compactor: Delete the prefix `blocks_meta` from the metadata fetcher metrics. #6832
+* [BUGFIX] Store Gateway: Avoid race condition by deduplicating entries in bucket stores user scan. #6863

 ## 1.19.0 2025-02-27


pkg/storegateway/bucket_stores.go

Lines changed: 15 additions & 0 deletions

@@ -458,10 +458,25 @@ func (u *BucketStores) scanUsers(ctx context.Context) ([]string, error) {
 	users := make([]string, 0, len(activeUsers)+len(deletingUsers))
 	users = append(users, activeUsers...)
 	users = append(users, deletingUsers...)
+	users = deduplicateUsers(users)

 	return users, err
 }

+func deduplicateUsers(users []string) []string {
+	seen := make(map[string]struct{}, len(users))
+	var uniqueUsers []string
+
+	for _, user := range users {
+		if _, ok := seen[user]; !ok {
+			seen[user] = struct{}{}
+			uniqueUsers = append(uniqueUsers, user)
+		}
+	}
+
+	return uniqueUsers
+}
+
 func (u *BucketStores) getStore(userID string) *store.BucketStore {
 	u.storesMu.RLock()
 	defer u.storesMu.RUnlock()
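
For quick reference, a self-contained sketch of how the new helper behaves: it preserves order and keeps the first occurrence of each tenant, mirroring the way scanUsers concatenates the active and deleting lists. The helper body is copied from the diff above; the surrounding package main and sample data are illustrative only.

```go
package main

import "fmt"

// deduplicateUsers is copied from the diff above to make this example runnable.
func deduplicateUsers(users []string) []string {
	seen := make(map[string]struct{}, len(users))
	var uniqueUsers []string

	for _, user := range users {
		if _, ok := seen[user]; !ok {
			seen[user] = struct{}{}
			uniqueUsers = append(uniqueUsers, user)
		}
	}

	return uniqueUsers
}

func main() {
	active := []string{"user-1", "user-2"}
	deleting := []string{"user-1"} // a tenant can appear in both lists

	users := make([]string, 0, len(active)+len(deleting))
	users = append(users, active...)
	users = append(users, deleting...)

	fmt.Println(deduplicateUsers(users)) // [user-1 user-2]
}
```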

pkg/storegateway/bucket_stores_test.go

Lines changed: 40 additions & 0 deletions

@@ -454,6 +454,38 @@ func TestBucketStores_syncUsersBlocks(t *testing.T) {
 	}
 }

+func TestBucketStores_scanUsers(t *testing.T) {
+	t.Parallel()
+
+	tests := map[string]struct {
+		scanner     *mockScanner
+		expectedRes []string
+	}{
+		"should return unique users only": {
+			scanner: &mockScanner{
+				res: []string{"user-1", "user-2", "user-1"},
+			},
+			expectedRes: []string{"user-1", "user-2"},
+		},
+	}
+
+	for testName, testData := range tests {
+		testData := testData
+		t.Run(testName, func(t *testing.T) {
+			t.Parallel()
+
+			stores := &BucketStores{
+				userScanner: testData.scanner,
+			}
+
+			users, err := stores.scanUsers(context.Background())
+
+			assert.NoError(t, err)
+			assert.ElementsMatch(t, testData.expectedRes, users)
+		})
+	}
+}
+
 func TestBucketStores_Series_ShouldCorrectlyQuerySeriesSpanningMultipleChunks(t *testing.T) {
 	for _, lazyLoadingEnabled := range []bool{true, false} {
 		t.Run(fmt.Sprintf("lazy loading enabled = %v", lazyLoadingEnabled), func(t *testing.T) {

@@ -996,3 +1028,11 @@ func (f *failFirstGetBucket) Get(ctx context.Context, name string) (io.ReadCloser, error) {

 	return f.Bucket.Get(ctx, name)
 }
+
+type mockScanner struct {
+	res []string
+}
+
+func (m *mockScanner) ScanUsers(_ context.Context) (active, deleting, deleted []string, err error) {
+	return m.res, nil, nil, nil
+}
