paginator: fix tokenizer comparison of composite index and ID
The `CreateIndexAndIDTokenizer` creates a composite token by joining the object's create index and ID with a `.`. Tokens are then compared lexicographically. That comparison is appropriate for the ID segment of the token, but not for the create index segment. Since create index values are ordered numerically, a lexicographical comparison can produce unexpected results. For example, comparing the token `12.object-id` to `102.object-id` reports `12.object-id` as greater. That is a correct string comparison, but it is wrong for the intent of the token: given the token's composition, `12.object-id` should compare as less.

The unexpected behavior can be seen when performing list operations (such as listing allocations). It is encountered inconsistently because two conditions must be met:

1. Create index values with a large enough span (e.g. 12 and 102).
2. A per-page value that produces a "bad" next token (e.g. one prefixed with 102).

To prevent the unexpected behavior, the target token is split and its components are compared against the object individually (sketched below).

Fixes #25435
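As a minimal, self-contained sketch (not the Nomad source itself), the snippet below contrasts a purely lexicographical comparison of the composite token with a comparison that splits the token and treats the create index numerically. The helper names `compareLexical` and `compareSplit` are hypothetical and exist only for this illustration.

```go
package main

import (
	"cmp"
	"fmt"
	"strconv"
	"strings"
)

// compareLexical compares the whole composite token as a string, so
// "12.object-id" sorts after "102.object-id" because '2' > '0'.
func compareLexical(token, target string) int {
	return cmp.Compare(token, target)
}

// compareSplit splits the target on the first '.', compares the create
// index segment numerically, and only uses a string comparison for the
// ID segment (or when the target is malformed).
func compareSplit(index uint64, id, target string) int {
	token := fmt.Sprintf("%d.%s", index, id)
	parts := strings.SplitN(target, ".", 2)
	if len(parts) < 2 {
		return cmp.Compare(token, target)
	}
	targetIndex, err := strconv.Atoi(parts[0])
	if err != nil {
		return cmp.Compare(token, target)
	}
	if c := cmp.Compare(index, uint64(targetIndex)); c != 0 {
		return c
	}
	return cmp.Compare(id, parts[1])
}

func main() {
	fmt.Println(compareLexical("12.object-id", "102.object-id")) // 1: lexicographically "greater"
	fmt.Println(compareSplit(12, "object-id", "102.object-id"))  // -1: numerically 12 < 102
}
```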
.changelog/25792.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
+```release-note:bug
+paginator: fix tokenizer comparison of composite index and ID
+```
@@ -7,6 +7,7 @@ import (
 	"cmp"
 	"fmt"
+	"strconv"
 	"strings"
 )

 // Tokenizer is the interface that must be implemented to provide pagination
@@ -45,7 +46,32 @@ func CreateIndexAndIDTokenizer[T idAndCreateIndexGetter](target string) Tokenize
 		index := item.GetCreateIndex()
 		id := item.GetID()
 		token := fmt.Sprintf("%d.%s", index, id)
-		return token, cmp.Compare(token, target)
+
+		// Split the target to extract the create index and the ID values.
+		targetParts := strings.SplitN(target, ".", 2)
+		// If the target wasn't composed of both parts, directly compare.
+		if len(targetParts) < 2 {
+			return token, cmp.Compare(token, target)
+		}
+
+		// Convert the create index to an integer for comparison. This
+		// prevents a lexicographical comparison of the create index which
+		// can cause unexpected results when comparing index values like
+		// '12' and '102'. If the index cannot be converted to an integer,
+		// fall back to direct comparison.
+		targetIndex, err := strconv.Atoi(targetParts[0])
+		if err != nil {
+			return token, cmp.Compare(token, target)
+		}
+
+		indexCmp := cmp.Compare(index, uint64(targetIndex))
+		if indexCmp != 0 {
+			return token, indexCmp
+		}
+
+		// If the index values are equivalent use the ID values
+		// as the comparison.
+		return token, cmp.Compare(id, targetParts[1])
 	}
 }

@@ -52,3 +52,94 @@ func TestTokenizer(t *testing.T) {
 		})
 	}
 }
+
+func TestCreateIndexAndIDTokenizer(t *testing.T) {
+	ci.Parallel(t)
+
+	cases := []struct {
+		name          string
+		obj           *mockCreateIndexObject
+		target        string
+		expectedToken string
+		expectedCmp   int
+	}{
+		{
+			name:          "common index (less)",
+			obj:           newMockCreateIndexObject(12, "aaa-bbb-ccc"),
+			target:        "12.bbb-ccc-ddd",
+			expectedToken: "12.aaa-bbb-ccc",
+			expectedCmp:   -1,
+		},
+		{
+			name:          "common index (greater)",
+			obj:           newMockCreateIndexObject(12, "bbb-ccc-ddd"),
+			target:        "12.aaa-bbb-ccc",
+			expectedToken: "12.bbb-ccc-ddd",
+			expectedCmp:   1,
+		},
+		{
+			name:          "common index (equal)",
+			obj:           newMockCreateIndexObject(12, "bbb-ccc-ddd"),
+			target:        "12.bbb-ccc-ddd",
+			expectedToken: "12.bbb-ccc-ddd",
+			expectedCmp:   0,
+		},
+		{
+			name:          "less index",
+			obj:           newMockCreateIndexObject(12, "aaa-bbb-ccc"),
+			target:        "89.aaa-bbb-ccc",
+			expectedToken: "12.aaa-bbb-ccc",
+			expectedCmp:   -1,
+		},
+		{
+			name:          "greater index",
+			obj:           newMockCreateIndexObject(89, "aaa-bbb-ccc"),
+			target:        "12.aaa-bbb-ccc",
+			expectedToken: "89.aaa-bbb-ccc",
+			expectedCmp:   1,
+		},
+		{
+			name:          "common index start (less)",
+			obj:           newMockCreateIndexObject(12, "aaa-bbb-ccc"),
+			target:        "102.aaa-bbb-ccc",
+			expectedToken: "12.aaa-bbb-ccc",
+			expectedCmp:   -1,
+		},
+		{
+			name:          "common index start (greater)",
+			obj:           newMockCreateIndexObject(102, "aaa-bbb-ccc"),
+			target:        "12.aaa-bbb-ccc",
+			expectedToken: "102.aaa-bbb-ccc",
+			expectedCmp:   1,
+		},
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.name, func(t *testing.T) {
+			fn := CreateIndexAndIDTokenizer[*mockCreateIndexObject](tc.target)
+			actualToken, actualCmp := fn(tc.obj)
+			must.Eq(t, tc.expectedToken, actualToken)
+			must.Eq(t, tc.expectedCmp, actualCmp)
+		})
+	}
+}
+
+func newMockCreateIndexObject(createIndex uint64, id string) *mockCreateIndexObject {
+	return &mockCreateIndexObject{
+		createIndex: createIndex,
+		id:          id,
+	}
+}
+
+type mockCreateIndexObject struct {
+	createIndex uint64
+	id          string
+}
+
+func (m *mockCreateIndexObject) GetCreateIndex() uint64 {
+	return m.createIndex
+}
+
+func (m *mockCreateIndexObject) GetID() string {
+	return m.id
+}