chore: linting (#15813)

This commit is contained in:
Marko 2023-04-12 14:36:42 +02:00 committed by GitHub
parent 00e1f85eca
commit 37032fb948
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 150 additions and 138 deletions

View File

@ -41,10 +41,9 @@ func ValuesOf(values ...interface{}) []protoreflect.Value {
// this allows us to use imported messages, such as timestamppb.Timestamp
// in iterators.
value := values[i]
switch value.(type) {
case protoreflect.ProtoMessage:
if v, ok := value.(protoreflect.ProtoMessage); ok {
if !reflect.ValueOf(value).IsNil() {
value = value.(protoreflect.ProtoMessage).ProtoReflect()
value = v.ProtoReflect()
} else {
value = nil
}

View File

@ -37,11 +37,12 @@ func (b BoolCodec) Compare(v1, v2 protoreflect.Value) int {
if v2.IsValid() {
b2 = v2.Bool()
}
if b1 == b2 {
switch {
case b1 == b2:
return 0
} else if b1 {
case b1:
return -1
} else {
default:
return 1
}
}

View File

@ -72,15 +72,16 @@ func GetCodec(field protoreflect.FieldDescriptor, nonTerminal bool) (Codec, erro
case protoreflect.BytesKind:
if nonTerminal {
return NonTerminalBytesCodec{}, nil
} else {
return BytesCodec{}, nil
}
return BytesCodec{}, nil
case protoreflect.StringKind:
if nonTerminal {
return NonTerminalStringCodec{}, nil
} else {
return StringCodec{}, nil
}
return StringCodec{}, nil
case protoreflect.Uint32Kind:
return CompactUint32Codec{}, nil
case protoreflect.Fixed32Kind:

View File

@ -108,11 +108,12 @@ func TestCompactUInt32(t *testing.T) {
by := ormfield.EncodeCompactUint32(y)
cmp := bytes.Compare(bx, by)
if x < y {
switch {
case x < y:
assert.Equal(t, -1, cmp)
} else if x == y {
case x == y:
assert.Equal(t, 0, cmp)
} else {
default:
assert.Equal(t, 1, cmp)
}
@ -156,11 +157,12 @@ func TestCompactUInt64(t *testing.T) {
by := ormfield.EncodeCompactUint64(y)
cmp := bytes.Compare(bx, by)
if x < y {
switch {
case x < y:
assert.Equal(t, -1, cmp)
} else if x == y {
case x == y:
assert.Equal(t, 0, cmp)
} else {
default:
assert.Equal(t, 1, cmp)
}

View File

@ -51,9 +51,9 @@ func (d DurationCodec) Compare(v1, v2 protoreflect.Value) int {
c := compareInt(s1, s2)
if c != 0 {
return c
} else {
return compareInt(n1, n2)
}
return compareInt(n1, n2)
}
func (d DurationCodec) IsOrdered() bool {

View File

@ -34,11 +34,12 @@ func (e EnumCodec) Compare(v1, v2 protoreflect.Value) int {
if v2.IsValid() {
y = v2.Enum()
}
if x == y {
switch {
case x == y:
return 0
} else if x < y {
case x < y:
return -1
} else {
default:
return 1
}
}

View File

@ -22,10 +22,10 @@ func (i Int64Codec) Decode(r Reader) (protoreflect.Value, error) {
if x >= int64Max {
x = x - int64Max - 1
return protoreflect.ValueOfInt64(int64(x)), err
} else {
y := int64(x) - int64Max - 1
return protoreflect.ValueOfInt64(y), err
}
y := int64(x) - int64Max - 1
return protoreflect.ValueOfInt64(y), err
}
func (i Int64Codec) Encode(value protoreflect.Value, w io.Writer) error {
@ -36,11 +36,11 @@ func (i Int64Codec) Encode(value protoreflect.Value, w io.Writer) error {
if x >= -1 {
y := uint64(x) + int64Max + 1
return binary.Write(w, binary.BigEndian, y)
} else {
x += int64Max
x += 1
return binary.Write(w, binary.BigEndian, uint64(x))
}
x += int64Max
x++
return binary.Write(w, binary.BigEndian, uint64(x))
}
func (i Int64Codec) Compare(v1, v2 protoreflect.Value) int {
@ -67,11 +67,12 @@ func compareInt(v1, v2 protoreflect.Value) int {
if v2.IsValid() {
y = v2.Int()
}
if x == y {
switch {
case x == y:
return 0
} else if x < y {
case x < y:
return -1
} else {
default:
return 1
}
}

View File

@ -72,7 +72,7 @@ func (t TimestampCodec) Encode(value protoreflect.Value, w io.Writer) error {
nanosBz[i] = byte(nanosInt)
nanosInt >>= 8
}
nanosBz[0] = nanosBz[0] | 0xC0
nanosBz[0] |= 0xC0
_, err = w.Write(nanosBz[:])
return err
}
@ -151,9 +151,9 @@ func (t TimestampCodec) Compare(v1, v2 protoreflect.Value) int {
c := compareInt(s1, s2)
if c != 0 {
return c
} else {
return compareInt(n1, n2)
}
return compareInt(n1, n2)
}
func (t TimestampCodec) IsOrdered() bool {
@ -215,9 +215,9 @@ func (t TimestampV0Codec) Compare(v1, v2 protoreflect.Value) int {
c := compareInt(s1, s2)
if c != 0 {
return c
} else {
return compareInt(n1, n2)
}
return compareInt(n1, n2)
}
func (t TimestampV0Codec) IsOrdered() bool {

View File

@ -49,11 +49,12 @@ func compareUint(v1, v2 protoreflect.Value) int {
if v2.IsValid() {
y = v2.Uint()
}
if x == y {
switch {
case x == y:
return 0
} else if x < y {
case x < y:
return -1
} else {
default:
return 1
}
}

View File

@ -43,14 +43,14 @@ func (p *PrimaryKeyEntry) GetTableName() protoreflect.FullName {
func (p *PrimaryKeyEntry) String() string {
if p.Value == nil {
return fmt.Sprintf("PK %s %s -> _", p.TableName, fmtValues(p.Key))
} else {
valBz, err := stablejson.Marshal(p.Value)
valStr := string(valBz)
if err != nil {
valStr = fmt.Sprintf("ERR %v", err)
}
return fmt.Sprintf("PK %s %s -> %s", p.TableName, fmtValues(p.Key), valStr)
}
valBz, err := stablejson.Marshal(p.Value)
valStr := string(valBz)
if err != nil {
valStr = fmt.Sprintf("ERR %v", err)
}
return fmt.Sprintf("PK %s %s -> %s", p.TableName, fmtValues(p.Key), valStr)
}
func fmtValues(values []protoreflect.Value) string {
@ -108,9 +108,9 @@ func fmtFields(fields []protoreflect.Name) string {
func (i *IndexKeyEntry) String() string {
if i.IsUnique {
return fmt.Sprintf("UNIQ %s", i.string())
} else {
return fmt.Sprintf("IDX %s", i.string())
}
return fmt.Sprintf("IDX %s", i.string())
}
// SeqEntry represents a sequence for tables with auto-incrementing primary keys.

View File

@ -177,11 +177,12 @@ func (cdc *KeyCodec) CompareKeys(values1, values2 []protoreflect.Value) int {
}
// values are equal but arrays of different length
if j == k {
switch {
case j == k:
return 0
} else if j < k {
case j < k:
return -1
} else {
default:
return 1
}
}
@ -247,19 +248,20 @@ func (cdc KeyCodec) CheckValidRangeIterationKeys(start, end []protoreflect.Value
y := end[i]
cmp = fieldCdc.Compare(x, y)
if cmp > 0 {
switch {
case cmp > 0:
return ormerrors.InvalidRangeIterationKeys.Wrapf(
"start must be before end for field %s",
cdc.fieldDescriptors[i].FullName(),
)
} else if !fieldCdc.IsOrdered() && cmp != 0 {
case !fieldCdc.IsOrdered() && cmp != 0:
descriptor := cdc.fieldDescriptors[i]
return ormerrors.InvalidRangeIterationKeys.Wrapf(
"field %s of kind %s doesn't support ordered range iteration",
descriptor.FullName(),
descriptor.Kind(),
)
} else if cmp < 0 {
case cmp < 0:
break
}
}

View File

@ -107,12 +107,12 @@ func (u UniqueKeyCodec) DecodeIndexKey(k, v []byte) (indexFields, primaryKey []p
return ks, pk, nil
}
func (cdc UniqueKeyCodec) extractPrimaryKey(keyValues, valueValues []protoreflect.Value) []protoreflect.Value {
numPkFields := len(cdc.pkFieldOrder)
func (u UniqueKeyCodec) extractPrimaryKey(keyValues, valueValues []protoreflect.Value) []protoreflect.Value {
numPkFields := len(u.pkFieldOrder)
pkValues := make([]protoreflect.Value, numPkFields)
for i := 0; i < numPkFields; i++ {
fo := cdc.pkFieldOrder[i]
fo := u.pkFieldOrder[i]
if fo.inKey {
pkValues[i] = keyValues[fo.i]
} else {
@ -160,11 +160,10 @@ func (u UniqueKeyCodec) EncodeEntry(entry Entry) (k, v []byte, err error) {
if !fieldOrder.inKey {
// goes in values because it is not present in the index key otherwise
values = append(values, value)
} else {
// does not go in values, but we need to verify that the value in index values matches the primary key value
if u.keyCodec.fieldCodecs[fieldOrder.i].Compare(value, indexEntry.IndexValues[fieldOrder.i]) != 0 {
return nil, nil, ormerrors.BadDecodeEntry.Wrapf("value in primary key does not match corresponding value in index key")
}
}
// does not go in values, but we need to verify that the value in index values matches the primary key value
if u.keyCodec.fieldCodecs[fieldOrder.i].Compare(value, indexEntry.IndexValues[fieldOrder.i]) != 0 {
return nil, nil, ormerrors.BadDecodeEntry.Wrapf("value in primary key does not match corresponding value in index key")
}
}

View File

@ -1,3 +1,4 @@
//nolint:unused // ignore unused code linting
package codegen
import (

View File

@ -1,3 +1,4 @@
// nolint:unused // ignore unused code linting
package codegen
import (

View File

@ -302,7 +302,7 @@ func (w *writer) F(format string, args ...interface{}) {
}
func (w *writer) Indent() {
w.indent += 1
w.indent++
w.updateIndent()
}
@ -314,6 +314,6 @@ func (w *writer) updateIndent() {
}
func (w *writer) Dedent() {
w.indent -= 1
w.indent--
w.updateIndent()
}

View File

@ -1,3 +1,4 @@
//nolint:unused // ignore unused code linting
package codegen
import (

View File

@ -235,12 +235,12 @@ func (d debugHooks) ValidateInsert(context context.Context, message proto.Messag
}
func (d debugHooks) ValidateUpdate(ctx context.Context, existing, new proto.Message) error {
existingJson, err := stablejson.Marshal(existing)
existingJSON, err := stablejson.Marshal(existing)
if err != nil {
return err
}
newJson, err := stablejson.Marshal(new)
newJSON, err := stablejson.Marshal(new)
if err != nil {
return err
}
@ -248,8 +248,8 @@ func (d debugHooks) ValidateUpdate(ctx context.Context, existing, new proto.Mess
d.debugger.Log(fmt.Sprintf(
"ORM BEFORE UPDATE %s %s -> %s",
existing.ProtoReflect().Descriptor().FullName(),
existingJson,
newJson,
existingJSON,
newJSON,
))
if d.validateHooks != nil {
return d.validateHooks.ValidateUpdate(ctx, existing, new)
@ -291,12 +291,12 @@ func (d debugHooks) OnInsert(ctx context.Context, message proto.Message) {
}
func (d debugHooks) OnUpdate(ctx context.Context, existing, new proto.Message) {
existingJson, err := stablejson.Marshal(existing)
existingJSON, err := stablejson.Marshal(existing)
if err != nil {
panic(err)
}
newJson, err := stablejson.Marshal(new)
newJSON, err := stablejson.Marshal(new)
if err != nil {
panic(err)
}
@ -304,8 +304,8 @@ func (d debugHooks) OnUpdate(ctx context.Context, existing, new proto.Message) {
d.debugger.Log(fmt.Sprintf(
"ORM AFTER UPDATE %s %s -> %s",
existing.ProtoReflect().Descriptor().FullName(),
existingJson,
newJson,
existingJSON,
newJSON,
))
if d.writeHooks != nil {
d.writeHooks.OnUpdate(ctx, existing, new)

View File

@ -30,7 +30,7 @@ type fileDescriptorDBOptions struct {
type fileDescriptorDB struct {
id uint32
prefix []byte
tablesById map[uint32]ormtable.Table
tablesByID map[uint32]ormtable.Table
tablesByName map[protoreflect.FullName]ormtable.Table
fileDescriptor protoreflect.FileDescriptor
}
@ -41,7 +41,7 @@ func newFileDescriptorDB(fileDescriptor protoreflect.FileDescriptor, options fil
schema := &fileDescriptorDB{
id: options.ID,
prefix: prefix,
tablesById: map[uint32]ormtable.Table{},
tablesByID: map[uint32]ormtable.Table{},
tablesByName: map[protoreflect.FullName]ormtable.Table{},
fileDescriptor: fileDescriptor,
}
@ -76,10 +76,10 @@ func newFileDescriptorDB(fileDescriptor protoreflect.FileDescriptor, options fil
}
id := table.ID()
if _, ok := schema.tablesById[id]; ok {
if _, ok := schema.tablesByID[id]; ok {
return nil, ormerrors.InvalidTableId.Wrapf("duplicate ID %d for %s", id, tableName)
}
schema.tablesById[id] = table
schema.tablesByID[id] = table
if _, ok := schema.tablesByName[tableName]; ok {
return nil, ormerrors.InvalidTableDefinition.Wrapf("duplicate table %s", tableName)
@ -106,7 +106,7 @@ func (f fileDescriptorDB) DecodeEntry(k, v []byte) (ormkv.Entry, error) {
return nil, ormerrors.UnexpectedDecodePrefix.Wrapf("uint32 varint id out of range %d", id)
}
table, ok := f.tablesById[uint32(id)]
table, ok := f.tablesByID[uint32(id)]
if !ok {
return nil, ormerrors.UnexpectedDecodePrefix.Wrapf("can't find table with id %d", id)
}

View File

@ -47,7 +47,7 @@ type ModuleDB interface {
type moduleDB struct {
prefix []byte
filesById map[uint32]*fileDescriptorDB
filesByID map[uint32]*fileDescriptorDB
tablesByName map[protoreflect.FullName]ormtable.Table
}
@ -83,7 +83,7 @@ func NewModuleDB(schema *ormv1alpha1.ModuleSchemaDescriptor, options ModuleDBOpt
prefix := schema.Prefix
db := &moduleDB{
prefix: prefix,
filesById: map[uint32]*fileDescriptorDB{},
filesByID: map[uint32]*fileDescriptorDB{},
tablesByName: map[protoreflect.FullName]ormtable.Table{},
}
@ -162,7 +162,7 @@ func NewModuleDB(schema *ormv1alpha1.ModuleSchemaDescriptor, options ModuleDBOpt
return nil, err
}
db.filesById[id] = fdSchema
db.filesByID[id] = fdSchema
for name, table := range fdSchema.tablesByName {
if _, ok := db.tablesByName[name]; ok {
return nil, ormerrors.UnexpectedError.Wrapf("duplicate table %s", name)
@ -191,7 +191,7 @@ func (m moduleDB) DecodeEntry(k, v []byte) (ormkv.Entry, error) {
return nil, ormerrors.UnexpectedDecodePrefix.Wrapf("uint32 varint id out of range %d", id)
}
fileSchema, ok := m.filesById[uint32(id)]
fileSchema, ok := m.filesByID[uint32(id)]
if !ok {
return nil, ormerrors.UnexpectedDecodePrefix.Wrapf("can't find FileDescriptor schema with id %d", id)
}

View File

@ -89,7 +89,7 @@ func (k keeper) Mint(ctx context.Context, acct, denom string, amount uint64) err
if supply == nil {
supply = &testpb.Supply{Denom: denom, Amount: amount}
} else {
supply.Amount = supply.Amount + amount
supply.Amount += amount
}
err = k.store.SupplyTable().Save(ctx, supply)
@ -111,7 +111,7 @@ func (k keeper) Burn(ctx context.Context, acct, denom string, amount uint64) err
return fmt.Errorf("insufficient supply")
}
supply.Amount = supply.Amount - amount
supply.Amount -= amount
if supply.Amount == 0 {
err = supplyStore.Delete(ctx, supply)
@ -183,9 +183,9 @@ func (k keeper) safeSubBalance(ctx context.Context, acct, denom string, amount u
if balance.Amount == 0 {
return balanceStore.Delete(ctx, balance)
} else {
return balanceStore.Save(ctx, balance)
}
return balanceStore.Save(ctx, balance)
}
func TestModuleDB(t *testing.T) {
@ -227,13 +227,13 @@ func TestModuleDB(t *testing.T) {
// check JSON
target := genesis.RawJSONTarget{}
assert.NilError(t, db.GenesisHandler().DefaultGenesis(target.Target()))
rawJson, err := target.JSON()
rawJSON, err := target.JSON()
assert.NilError(t, err)
golden.Assert(t, string(rawJson), "default_json.golden")
golden.Assert(t, string(rawJSON), "default_json.golden")
target = genesis.RawJSONTarget{}
assert.NilError(t, db.GenesisHandler().ExportGenesis(ctx, target.Target()))
rawJson, err = target.JSON()
rawJSON, err = target.JSON()
assert.NilError(t, err)
goodJSON := `{
@ -255,14 +255,14 @@ func TestModuleDB(t *testing.T) {
backend2 := ormtest.NewMemoryBackend()
ctx2 := ormtable.WrapContextDefault(backend2)
source, err = genesis.SourceFromRawJSON(rawJson)
source, err = genesis.SourceFromRawJSON(rawJSON)
assert.NilError(t, err)
assert.NilError(t, db.GenesisHandler().ValidateGenesis(source))
assert.NilError(t, db.GenesisHandler().InitGenesis(ctx2, source))
testkv.AssertBackendsEqual(t, backend, backend2)
}
func runSimpleBankTests(t *testing.T, k Keeper, ctx context.Context) {
func runSimpleBankTests(t *testing.T, k Keeper, ctx context.Context) { // nolint:revive // test function
// mint coins
denom := "foo"
acct1 := "bob"

View File

@ -130,7 +130,7 @@ func (t autoIncrementTable) EncodeEntry(entry ormkv.Entry) (k, v []byte, err err
}
func (t autoIncrementTable) ValidateJSON(reader io.Reader) error {
return t.decodeAutoIncJson(nil, reader, func(message proto.Message, maxSeq uint64) error {
return t.decodeAutoIncJSON(nil, reader, func(message proto.Message, maxSeq uint64) error {
messageRef := message.ProtoReflect()
pkey := messageRef.Get(t.autoIncField).Uint()
if pkey > maxSeq {
@ -152,7 +152,7 @@ func (t autoIncrementTable) ImportJSON(ctx context.Context, reader io.Reader) er
return err
}
return t.decodeAutoIncJson(backend, reader, func(message proto.Message, maxSeq uint64) error {
return t.decodeAutoIncJSON(backend, reader, func(message proto.Message, maxSeq uint64) error {
messageRef := message.ProtoReflect()
pkey := messageRef.Get(t.autoIncField).Uint()
if pkey == 0 {
@ -160,21 +160,21 @@ func (t autoIncrementTable) ImportJSON(ctx context.Context, reader io.Reader) er
// generate one
_, err = t.save(ctx, backend, message, saveModeInsert)
return err
} else {
if pkey > maxSeq {
return fmt.Errorf("invalid auto increment primary key %d, expected a value <= %d, the highest "+
"sequence number", pkey, maxSeq)
}
// we do have a primary key and calling Save will fail because it expects
// either no primary key or SAVE_MODE_UPDATE. So instead we drop one level
// down and insert using tableImpl which doesn't know about
// auto-incrementing primary keys.
return t.tableImpl.save(ctx, backend, message, saveModeInsert)
}
if pkey > maxSeq {
return fmt.Errorf("invalid auto increment primary key %d, expected a value <= %d, the highest "+
"sequence number", pkey, maxSeq)
}
// we do have a primary key and calling Save will fail because it expects
// either no primary key or SAVE_MODE_UPDATE. So instead we drop one level
// down and insert using tableImpl which doesn't know about
// auto-incrementing primary keys.
return t.tableImpl.save(ctx, backend, message, saveModeInsert)
})
}
func (t autoIncrementTable) decodeAutoIncJson(backend Backend, reader io.Reader, onMsg func(message proto.Message, maxID uint64) error) error {
func (t autoIncrementTable) decodeAutoIncJSON(backend Backend, reader io.Reader, onMsg func(message proto.Message, maxID uint64) error) error {
decoder, err := t.startDecodeJson(reader)
if err != nil {
return err

View File

@ -42,7 +42,7 @@ func TestAutoIncrementScenario(t *testing.T) {
checkEncodeDecodeEntries(t, table, store.IndexStoreReader())
}
func runAutoIncrementScenario(t *testing.T, table ormtable.AutoIncrementTable, ctx context.Context) {
func runAutoIncrementScenario(t *testing.T, table ormtable.AutoIncrementTable, ctx context.Context) { //nolint:revive // ignore linting on testing function signature
store, err := testpb.NewExampleAutoIncrementTableTable(table)
assert.NilError(t, err)
@ -57,10 +57,10 @@ func runAutoIncrementScenario(t *testing.T, table ormtable.AutoIncrementTable, c
assert.Equal(t, curSeq, uint64(1))
ex2 := &testpb.ExampleAutoIncrementTable{X: "bar", Y: 10}
newId, err := table.InsertReturningPKey(ctx, ex2)
newID, err := table.InsertReturningPKey(ctx, ex2)
assert.NilError(t, err)
assert.Equal(t, uint64(2), ex2.Id)
assert.Equal(t, newId, ex2.Id)
assert.Equal(t, newID, ex2.Id)
curSeq, err = table.LastInsertedSequence(ctx)
assert.NilError(t, err)
assert.Equal(t, curSeq, uint64(2))

View File

@ -84,12 +84,12 @@ func (it *paginationIterator) Next() bool {
if ok {
it.i++
return true
} else {
it.pageRes = &queryv1beta1.PageResponse{
Total: uint64(it.i),
}
return false
}
it.pageRes = &queryv1beta1.PageResponse{
Total: uint64(it.i),
}
return false
}
func (it paginationIterator) PageResponse() *queryv1beta1.PageResponse {

View File

@ -218,8 +218,8 @@ func (t tableImpl) doDecodeJson(decoder *json.Decoder, onFirst func(message json
first := true
for decoder.More() {
var rawJson json.RawMessage
err := decoder.Decode(&rawJson)
var rawJSON json.RawMessage
err := decoder.Decode(&rawJSON)
if err != nil {
return ormerrors.JSONImportError.Wrapf("%s", err)
}
@ -227,7 +227,7 @@ func (t tableImpl) doDecodeJson(decoder *json.Decoder, onFirst func(message json
if first {
first = false
if onFirst != nil {
if onFirst(rawJson) {
if onFirst(rawJSON) {
// if onFirst handled this, skip decoding into a proto message
continue
}
@ -235,7 +235,7 @@ func (t tableImpl) doDecodeJson(decoder *json.Decoder, onFirst func(message json
}
msg := t.MessageType().New().Interface()
err = unmarshalOptions.Unmarshal(rawJson, msg)
err = unmarshalOptions.Unmarshal(rawJSON, msg)
if err != nil {
return err
}
@ -283,9 +283,9 @@ func (t tableImpl) ValidateJSON(reader io.Reader) error {
return t.decodeJson(reader, func(message proto.Message) error {
if t.customJSONValidator != nil {
return t.customJSONValidator(message)
} else {
return DefaultJSONValidator(message)
}
return DefaultJSONValidator(message)
})
}

View File

@ -473,7 +473,7 @@ func runTestScenario(t *testing.T, table ormtable.Table, backend ormtable.Backen
// now let's update some things
for i := 0; i < 5; i++ {
data[i].U64 = data[i].U64 * 2
data[i].U64 *= 2
data[i].Bz = []byte(data[i].Str)
err = store.Update(ctx, data[i])
assert.NilError(t, err)
@ -763,9 +763,7 @@ func (m *IndexModel) Less(i, j int) bool {
}
func (m *IndexModel) Swap(i, j int) {
x := m.data[i]
m.data[i] = m.data[j]
m.data[j] = x
m.data[i], m.data[j] = m.data[j], m.data[i]
}
var _ sort.Interface = &IndexModel{}

View File

@ -104,6 +104,7 @@ func TestDecode(t *testing.T) {
require.True(t, ok)
err = txBuilder.SetMsgs(gogoMsg)
require.NoError(t, err)
txBuilder.SetFeeAmount(fee)
txBuilder.SetGasLimit(gas)
txBuilder.SetMemo(memo)
@ -112,6 +113,7 @@ func TestDecode(t *testing.T) {
tx := txBuilder.GetTx()
txBytes, err := encCfg.TxConfig.TxEncoder()(tx)
require.NoError(t, err)
decodeCtx, err := decode.NewDecoder(decode.Options{})
require.NoError(t, err)
decodedTx, err := decodeCtx.Decode(txBytes)

View File

@ -2,11 +2,12 @@ package query
import (
"context"
"errors"
"fmt"
"cosmossdk.io/collections"
collcodec "cosmossdk.io/collections/codec"
storetypes "cosmossdk.io/store/types"
"errors"
"fmt"
)
// WithCollectionPaginationPairPrefix applies a prefix to a collection, whose key is a collection.Pair,
@ -276,7 +277,7 @@ func encodeCollKey[K, V any, C Collection[K, V]](coll C, key K) ([]byte, error)
return buffer, err
}
func getCollIter[K, V any, C Collection[K, V]](ctx context.Context, coll C, prefix []byte, start []byte, reverse bool) (collections.Iterator[K, V], error) {
func getCollIter[K, V any, C Collection[K, V]](ctx context.Context, coll C, prefix, start []byte, reverse bool) (collections.Iterator[K, V], error) {
// TODO: maybe can be simplified
if reverse {
var end []byte

View File

@ -42,7 +42,7 @@ func (suite *TestSuite) TestGrant() {
errMsg string
}{
{
name: "indentical grantee and granter",
name: "identical grantee and granter",
malleate: func() *authz.MsgGrant {
grant, err := authz.NewGrant(curBlockTime, banktypes.NewSendAuthorization(coins, nil), &oneYear)
suite.Require().NoError(err)
@ -217,7 +217,7 @@ func (suite *TestSuite) TestRevoke() {
errMsg string
}{
{
name: "indentical grantee and granter",
name: "identical grantee and granter",
malleate: func() *authz.MsgRevoke {
return &authz.MsgRevoke{
Granter: grantee.String(),

View File

@ -1,9 +1,10 @@
package keeper
import (
"cosmossdk.io/collections"
"fmt"
"cosmossdk.io/collections"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/query"
"github.com/cosmos/cosmos-sdk/x/bank/types"

View File

@ -2,6 +2,7 @@ package keeper
import (
"context"
"cosmossdk.io/collections"
"cosmossdk.io/math"
"google.golang.org/grpc/codes"
@ -90,7 +91,6 @@ func (k BaseKeeper) SpendableBalances(ctx context.Context, req *types.QuerySpend
balances = append(balances, sdk.NewCoin(key.K2(), zeroAmt))
return false, nil // not including results as they're appended here
}, query.WithCollectionPaginationPairPrefix[sdk.AccAddress, string](addr))
if err != nil {
return nil, status.Errorf(codes.InvalidArgument, "paginate: %v", err)
}

View File

@ -1,9 +1,10 @@
package keeper
import (
"cosmossdk.io/collections"
"fmt"
"cosmossdk.io/collections"
errorsmod "cosmossdk.io/errors"
storetypes "cosmossdk.io/store/types"

View File

@ -1,8 +1,10 @@
package keeper
import (
"cosmossdk.io/collections/indexes"
"fmt"
"cosmossdk.io/collections/indexes"
"github.com/cockroachdb/errors"
"cosmossdk.io/collections"
@ -44,7 +46,7 @@ func newBalancesIndexes(sb *collections.SchemaBuilder) BalancesIndexes {
return BalancesIndexes{
Denom: indexes.NewReversePair[math.Int](
sb, types.DenomAddressPrefix, "address_by_denom_index",
collections.PairKeyCodec(sdk.AddressKeyAsIndexKey(sdk.AccAddressKey), collections.StringKey), // NOTE: refer to the AddressKeyAsIndexKey docs to understand why we do this.
collections.PairKeyCodec(sdk.AddressKeyAsIndexKey(sdk.AccAddressKey), collections.StringKey), // nolint:staticcheck // Note: refer to the AddressKeyAsIndexKey docs to understand why we do this.
),
}
}

View File

@ -136,6 +136,6 @@ func pruneZeroSupply(store storetypes.KVStore) error {
// CreatePrefixedAccountStoreKey returns the key for the given account and denomination.
// This method can be used when performing an ABCI query for the balance of an account.
func CreatePrefixedAccountStoreKey(addr []byte, denom []byte) []byte {
func CreatePrefixedAccountStoreKey(addr, denom []byte) []byte {
return append(CreateAccountBalancesPrefix(addr), denom...)
}

View File

@ -1,11 +1,12 @@
package types
import (
"testing"
"cosmossdk.io/collections/colltest"
"cosmossdk.io/math"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/stretchr/testify/require"
"testing"
)
func TestBalanceValueCodec(t *testing.T) {

View File

@ -8,11 +8,7 @@ import (
"github.com/cosmos/cosmos-sdk/types/kv"
)
const (
addrLen = 20
MissedBlockBitmapChunkSize = 1024 // 2^10 bits
)
const MissedBlockBitmapChunkSize = 1024 // 2^10 bits
var (
ValidatorSigningInfoKeyPrefix = []byte{0x01}