Skip to content

Commit ef443c4

Browse files
authored
Merge pull request from GHSA-h3m7-rqc4-7h9p
fix chunking integer overflow
2 parents 57206a1 + ab7b530 commit ef443c4

File tree

3 files changed

+159
-7
lines changed
Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
package graph
2+
3+
import (
4+
"fmt"
5+
"math"
6+
"testing"
7+
8+
"github.com/authzed/spicedb/internal/dispatch"
9+
"github.com/authzed/spicedb/internal/graph"
10+
core "github.com/authzed/spicedb/pkg/proto/core/v1"
11+
v1 "github.com/authzed/spicedb/pkg/proto/dispatch/v1"
12+
"github.com/authzed/spicedb/pkg/tuple"
13+
14+
"github.com/stretchr/testify/require"
15+
)
16+
17+
func TestDispatchChunking(t *testing.T) {
18+
schema := `
19+
definition user {
20+
relation self: user
21+
}
22+
23+
definition res {
24+
relation owner : user
25+
permission view = owner->self
26+
}`
27+
28+
resources := make([]*core.RelationTuple, 0, math.MaxUint16+1)
29+
enabled := make([]*core.RelationTuple, 0, math.MaxUint16+1)
30+
for i := 0; i < math.MaxUint16+1; i++ {
31+
resources = append(resources, tuple.Parse(fmt.Sprintf("res:res1#owner@user:user%d", i)))
32+
enabled = append(enabled, tuple.Parse(fmt.Sprintf("user:user%d#self@user:user%d", i, i)))
33+
}
34+
35+
ctx, dispatcher, revision := newLocalDispatcherWithSchemaAndRels(t, schema, append(enabled, resources...))
36+
37+
t.Run("check", func(t *testing.T) {
38+
for _, tpl := range resources[:1] {
39+
checkResult, err := dispatcher.DispatchCheck(ctx, &v1.DispatchCheckRequest{
40+
ResourceRelation: RR(tpl.ResourceAndRelation.Namespace, "view"),
41+
ResourceIds: []string{tpl.ResourceAndRelation.ObjectId},
42+
ResultsSetting: v1.DispatchCheckRequest_ALLOW_SINGLE_RESULT,
43+
Subject: ONR(tpl.Subject.Namespace, tpl.Subject.ObjectId, graph.Ellipsis),
44+
Metadata: &v1.ResolverMeta{
45+
AtRevision: revision.String(),
46+
DepthRemaining: 50,
47+
},
48+
})
49+
50+
require.NoError(t, err)
51+
require.NotNil(t, checkResult)
52+
require.NotEmpty(t, checkResult.ResultsByResourceId, "expected membership for resource %s", tpl.ResourceAndRelation.ObjectId)
53+
require.Equal(t, v1.ResourceCheckResult_MEMBER, checkResult.ResultsByResourceId[tpl.ResourceAndRelation.ObjectId].Membership)
54+
}
55+
})
56+
57+
t.Run("lookup-resources", func(t *testing.T) {
58+
for _, tpl := range resources[:1] {
59+
stream := dispatch.NewCollectingDispatchStream[*v1.DispatchLookupResourcesResponse](ctx)
60+
err := dispatcher.DispatchLookupResources(&v1.DispatchLookupResourcesRequest{
61+
ObjectRelation: RR(tpl.ResourceAndRelation.Namespace, "view"),
62+
Subject: ONR(tpl.Subject.Namespace, tpl.Subject.ObjectId, graph.Ellipsis),
63+
Metadata: &v1.ResolverMeta{
64+
AtRevision: revision.String(),
65+
DepthRemaining: 50,
66+
},
67+
OptionalLimit: veryLargeLimit,
68+
}, stream)
69+
70+
require.NoError(t, err)
71+
72+
foundResources, _, _, _ := processResults(stream)
73+
require.Len(t, foundResources, 1)
74+
}
75+
})
76+
77+
t.Run("lookup-subjects", func(t *testing.T) {
78+
for _, tpl := range resources[:1] {
79+
stream := dispatch.NewCollectingDispatchStream[*v1.DispatchLookupSubjectsResponse](ctx)
80+
81+
err := dispatcher.DispatchLookupSubjects(&v1.DispatchLookupSubjectsRequest{
82+
ResourceRelation: RR(tpl.ResourceAndRelation.Namespace, "view"),
83+
ResourceIds: []string{tpl.ResourceAndRelation.ObjectId},
84+
SubjectRelation: RR(tpl.Subject.Namespace, graph.Ellipsis),
85+
Metadata: &v1.ResolverMeta{
86+
AtRevision: revision.String(),
87+
DepthRemaining: 50,
88+
},
89+
}, stream)
90+
91+
require.NoError(t, err)
92+
res := stream.Results()
93+
require.Len(t, res, 1)
94+
require.Len(t, res[0].FoundSubjectsByResourceId, 1)
95+
require.NotNil(t, res[0].FoundSubjectsByResourceId["res1"])
96+
require.Len(t, res[0].FoundSubjectsByResourceId["res1"].FoundSubjects, math.MaxUint16+1)
97+
}
98+
})
99+
}

pkg/genutil/slicez/chunking.go

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
// ForEachChunkUntil invokes handler over successive chunks of data, each at
// most chunkSize elements long, until all data is consumed, the handler
// returns false, or the handler returns an error.
//
// It returns (true, nil) once every chunk has been handled, (false, nil) if
// the handler asked to stop early, and (false, err) if the handler errored.
// The handler is never invoked with an empty chunk, so empty input yields
// (true, nil) without any handler calls. A chunkSize of 0 is treated as 1.
//
// All index arithmetic is done in uint64: the previous uint16 math overflowed
// for slices longer than math.MaxUint16, panicking or silently dropping
// chunks (GHSA-h3m7-rqc4-7h9p).
func ForEachChunkUntil[T any](data []T, chunkSize uint16, handler func(items []T) (bool, error)) (bool, error) {
	if chunkSize == 0 {
		chunkSize = 1
	}

	dataLength := uint64(len(data))
	chunkSize64 := uint64(chunkSize)

	// Stride directly over chunk start offsets; this cannot overflow
	// (start < dataLength and chunkSize64 <= 65535) and, unlike the
	// chunk-count formula, never produces a trailing empty chunk.
	for chunkStart := uint64(0); chunkStart < dataLength; chunkStart += chunkSize64 {
		chunkEnd := chunkStart + chunkSize64
		if chunkEnd > dataLength {
			chunkEnd = dataLength
		}

		ok, err := handler(data[chunkStart:chunkEnd])
		if err != nil {
			return false, err
		}
		if !ok {
			return false, nil
		}
	}

	return true, nil
}

pkg/genutil/slicez/chunking_test.go

Lines changed: 53 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,28 @@ package slicez
22

33
import (
44
"fmt"
5+
"math"
56
"testing"
67

78
"github.com/stretchr/testify/require"
89
)
910

1011
func TestForEachChunk(t *testing.T) {
12+
t.Parallel()
13+
1114
for _, datasize := range []int{0, 1, 5, 10, 50, 100, 250} {
1215
datasize := datasize
1316
for _, chunksize := range []uint16{1, 2, 3, 5, 10, 50} {
1417
chunksize := chunksize
1518
t.Run(fmt.Sprintf("test-%d-%d", datasize, chunksize), func(t *testing.T) {
16-
data := []int{}
19+
t.Parallel()
20+
21+
data := make([]int, 0, datasize)
1722
for i := 0; i < datasize; i++ {
1823
data = append(data, i)
1924
}
2025

21-
found := []int{}
26+
found := make([]int, 0, datasize)
2227
ForEachChunk(data, chunksize, func(items []int) {
2328
found = append(found, items...)
2429
require.True(t, len(items) <= int(chunksize))
@@ -29,3 +34,49 @@ func TestForEachChunk(t *testing.T) {
2934
}
3035
}
3136
}
37+
38+
func TestForEachChunkOverflowPanic(t *testing.T) {
39+
t.Parallel()
40+
41+
datasize := math.MaxUint16
42+
chunksize := uint16(50)
43+
data := make([]int, 0, datasize)
44+
for i := 0; i < datasize; i++ {
45+
data = append(data, i)
46+
}
47+
48+
found := make([]int, 0, datasize)
49+
ForEachChunk(data, chunksize, func(items []int) {
50+
found = append(found, items...)
51+
require.True(t, len(items) <= int(chunksize))
52+
require.True(t, len(items) > 0)
53+
})
54+
55+
require.Equal(t, data, found)
56+
}
57+
58+
func TestForEachChunkOverflowIncorrect(t *testing.T) {
59+
t.Parallel()
60+
61+
chunksize := uint16(50)
62+
for _, datasize := range []int{math.MaxUint16 + int(chunksize), 10_000_000} {
63+
datasize := datasize
64+
t.Run(fmt.Sprintf("test-%d-%d", datasize, chunksize), func(t *testing.T) {
65+
t.Parallel()
66+
67+
data := make([]int, 0, datasize)
68+
for i := 0; i < datasize; i++ {
69+
data = append(data, i)
70+
}
71+
72+
found := make([]int, 0, datasize)
73+
ForEachChunk(data, chunksize, func(items []int) {
74+
found = append(found, items...)
75+
require.True(t, len(items) <= int(chunksize))
76+
require.True(t, len(items) > 0)
77+
})
78+
79+
require.Equal(t, data, found)
80+
})
81+
}
82+
}

0 commit comments

Comments
 (0)