chore: bump golangci-lint and fix all linting issues (#21761)

This commit is contained in:
Julien Robert 2024-09-16 21:11:19 +02:00 committed by GitHub
parent 52ba264c80
commit cbdfd9bdfa
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
186 changed files with 114 additions and 455 deletions

View File

@ -13,7 +13,7 @@ linters:
- dogsled
- errcheck
- errorlint
- exportloopref
- copyloopvar
- gci
- goconst
- gocritic
@ -45,15 +45,15 @@ issues:
- crypto/keys/secp256k1/internal/*
- types/coin_regex.go
exclude-rules:
- text: "ST1003:"
- text: "ST1003:" # We are fine with our current naming
linters:
- stylecheck
# FIXME: Disabled until golangci-lint updates stylecheck with this fix:
# https://github.com/dominikh/go-tools/issues/389
- text: "ST1016:"
- text: "ST1016:" # Ok with inconsistent receiver names
linters:
- stylecheck
- path: "migrations"
- path: "migrations" # migrations always use deprecated code
text: "SA1019:"
linters:
- staticcheck
@ -72,9 +72,9 @@ issues:
- text: "SA1019: params.SendEnabled is deprecated" # TODO remove once ready to remove from the sdk
linters:
- staticcheck
- text: "SA1029: Inappropriate key in context.WithValue" # TODO remove this when dependency is updated
- text: "G115: integer overflow conversion" # We are doing this everywhere.
linters:
- staticcheck
- gosec
- text: "leading space"
linters:
- nolintlint

View File

@ -1589,7 +1589,6 @@ func TestABCI_GetBlockRetentionHeight(t *testing.T) {
}
for name, tc := range testCases {
tc := tc
tc.bapp.SetParamStore(&paramStore{db: coretesting.NewMemDB()})
_, err := tc.bapp.InitChain(&abci.InitChainRequest{
@ -2081,7 +2080,7 @@ func TestABCI_PrepareProposal_VoteExtensions(t *testing.T) {
return nil, err
}
cp := ctx.ConsensusParams() // nolint:staticcheck // ignore linting error
cp := ctx.ConsensusParams() //nolint:staticcheck // ignore linting error
extsEnabled := cp.Feature.VoteExtensionsEnableHeight != nil && req.Height >= cp.Feature.VoteExtensionsEnableHeight.Value && cp.Feature.VoteExtensionsEnableHeight.Value != 0
if !extsEnabled {
// check abci params

View File

@ -47,7 +47,7 @@ func ValidateVoteExtensions(
extCommit abci.ExtendedCommitInfo,
) error {
// Get values from context
cp := ctx.ConsensusParams() // nolint:staticcheck // ignore linting error
cp := ctx.ConsensusParams() //nolint:staticcheck // ignore linting error
currentHeight := ctx.HeaderInfo().Height
chainID := ctx.HeaderInfo().ChainID
commitInfo := ctx.CometInfo().LastCommit
@ -258,7 +258,7 @@ func (h *DefaultProposalHandler) SetTxSelector(ts TxSelector) {
func (h *DefaultProposalHandler) PrepareProposalHandler() sdk.PrepareProposalHandler {
return func(ctx sdk.Context, req *abci.PrepareProposalRequest) (*abci.PrepareProposalResponse, error) {
var maxBlockGas uint64
if b := ctx.ConsensusParams().Block; b != nil { // nolint:staticcheck // ignore linting error
if b := ctx.ConsensusParams().Block; b != nil { //nolint:staticcheck // ignore linting error
maxBlockGas = uint64(b.MaxGas)
}
@ -405,7 +405,7 @@ func (h *DefaultProposalHandler) ProcessProposalHandler() sdk.ProcessProposalHan
var totalTxGas uint64
var maxBlockGas int64
if b := ctx.ConsensusParams().Block; b != nil { // nolint:staticcheck // ignore linting error
if b := ctx.ConsensusParams().Block; b != nil { //nolint:staticcheck // ignore linting error
maxBlockGas = b.MaxGas
}

View File

@ -6,7 +6,7 @@ import (
"reflect"
gogoproto "github.com/cosmos/gogoproto/proto"
"github.com/golang/protobuf/proto" // nolint: staticcheck // needed because gogoproto.Merge does not work consistently. See NOTE: comments.
"github.com/golang/protobuf/proto" //nolint: staticcheck // needed because gogoproto.Merge does not work consistently. See NOTE: comments.
"google.golang.org/grpc"
proto2 "google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"

View File

@ -245,7 +245,6 @@ func TestABCI_OfferSnapshot_Errors(t *testing.T) {
}, abci.OFFER_SNAPSHOT_RESULT_REJECT},
}
for name, tc := range testCases {
tc := tc
t.Run(name, func(t *testing.T) {
resp, err := suite.baseApp.OfferSnapshot(&abci.OfferSnapshotRequest{Snapshot: tc.snapshot})
require.NoError(t, err)

View File

@ -125,8 +125,6 @@ func TestSetCmdClientContextHandler(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
cmd := newCmd()
_ = testutil.ApplyMockIODiscardOutErr(cmd)

View File

@ -145,7 +145,6 @@ func TestConfigCmdEnvFlag(t *testing.T) {
}
for _, tc := range tt {
tc := tc
t.Run(tc.name, func(t *testing.T) {
testCmd := &cobra.Command{
Use: "test",

View File

@ -22,8 +22,6 @@ func TestParseGasSetting(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
gs, err := flags.ParseGasSetting(tc.input)

View File

@ -172,7 +172,6 @@ func Test_runAddCmdLedgerDryRun(t *testing.T) {
}
for _, tt := range testData {
tt := tt
t.Run(tt.name, func(t *testing.T) {
cmd := AddKeyCommand()
cmd.Flags().AddFlagSet(Commands().PersistentFlags())

View File

@ -297,7 +297,6 @@ func Test_runAddCmdDryRun(t *testing.T) {
},
}
for _, tt := range testData {
tt := tt
t.Run(tt.name, func(t *testing.T) {
cmd := AddKeyCommand()
cmd.Flags().AddFlagSet(Commands().PersistentFlags())

View File

@ -67,7 +67,6 @@ func Test_runListCmd(t *testing.T) {
{"keybase: w/key", kbHome2, false},
}
for _, tt := range testData {
tt := tt
t.Run(tt.name, func(t *testing.T) {
cmd.SetArgs([]string{
fmt.Sprintf("--%s=%s", flags.FlagKeyringDir, tt.kbDir),

View File

@ -21,7 +21,6 @@ func TestParseKey(t *testing.T) {
{"hex", []string{hexstr}, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
require.Equal(t, tt.wantErr, doParseKey(ParseKeyStringCommand(), "cosmos", tt.args) != nil)
})

View File

@ -237,7 +237,6 @@ func Test_validateMultisigThreshold(t *testing.T) {
{"1-2", args{2, 1}, true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
if err := validateMultisigThreshold(tt.args.k, tt.args.nKeys); (err != nil) != tt.wantErr {
t.Errorf("validateMultisigThreshold() error = %v, wantErr %v", err, tt.wantErr)
@ -272,7 +271,6 @@ func Test_getBechKeyOut(t *testing.T) {
{"cons", args{sdk.PrefixConsensus}, MkConsKeyOutput, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
output, err := getKeyOutput(ctx, tt.args.bechPrefix, k)
if tt.wantErr {

View File

@ -214,7 +214,6 @@ func TestAuxTxBuilder(t *testing.T) {
}
for _, tc := range testcases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
b = tx.NewAuxTxBuilder()
err := tc.malleate()

View File

@ -81,7 +81,6 @@ func TestCalculateGas(t *testing.T) {
}
for _, tc := range testCases {
stc := tc
txCfg, _ := newTestTxConfig()
defaultSignMode, err := signing.APISignModeToInternal(txCfg.SignModeHandler().DefaultMode())
require.NoError(t, err)
@ -90,16 +89,16 @@ func TestCalculateGas(t *testing.T) {
WithChainID("test-chain").
WithTxConfig(txCfg).WithSignMode(defaultSignMode)
t.Run(stc.name, func(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
mockClientCtx := mockContext{
gasUsed: tc.args.mockGasUsed,
wantErr: tc.args.mockWantErr,
}
simRes, gotAdjusted, err := CalculateGas(mockClientCtx, txf.WithGasAdjustment(stc.args.adjustment))
if stc.expPass {
simRes, gotAdjusted, err := CalculateGas(mockClientCtx, txf.WithGasAdjustment(tc.args.adjustment))
if tc.expPass {
require.NoError(t, err)
require.Equal(t, simRes.GasInfo.GasUsed, stc.wantEstimate)
require.Equal(t, gotAdjusted, stc.wantAdjusted)
require.Equal(t, simRes.GasInfo.GasUsed, tc.wantEstimate)
require.Equal(t, gotAdjusted, tc.wantAdjusted)
require.NotNil(t, simRes.Result)
} else {
require.Error(t, err)

View File

@ -67,7 +67,6 @@ func TestPaginate(t *testing.T) {
}
for i, tc := range testCases {
i, tc := i, tc
t.Run(tc.name, func(t *testing.T) {
start, end := client.Paginate(tc.numObjs, tc.page, tc.limit, tc.defLimit)
require.Equal(t, tc.expectedStart, start, "invalid result; test case #%d", i)

View File

@ -71,8 +71,6 @@ func TestAminoCodecMarshalJSONIndent(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
cdc := codec.NewAminoCodec(createTestCodec())
bz, err := cdc.MarshalJSONIndent(tc.input, "", " ")

View File

@ -122,7 +122,7 @@ func testMarshaling(t *testing.T, cdc interface {
}
for _, tc := range testCases {
tc := tc
m1 := mustMarshaler{cdc.Marshal, cdc.MustMarshal, cdc.Unmarshal, cdc.MustUnmarshal}
m2 := mustMarshaler{cdc.MarshalLengthPrefixed, cdc.MustMarshalLengthPrefixed, cdc.UnmarshalLengthPrefixed, cdc.MustUnmarshalLengthPrefixed}
m3 := mustMarshaler{

View File

@ -22,7 +22,6 @@ func TestWireTypeToString(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(fmt.Sprintf("wireType=%d", tt.typ), func(t *testing.T) {
if g, w := wireTypeToString(tt.typ), tt.want; g != w {
t.Fatalf("Mismatch:\nGot: %q\nWant: %q\n", g, w)

View File

@ -223,7 +223,6 @@ func TestRejectUnknownFieldsRepeated(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
protoBlob, err := proto.Marshal(tt.in)
if err != nil {
@ -280,7 +279,6 @@ func TestRejectUnknownFields_allowUnknownNonCriticals(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
blob, err := proto.Marshal(tt.in)
if err != nil {
@ -483,7 +481,6 @@ func TestRejectUnknownFieldsNested(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
protoBlob, err := proto.Marshal(tt.in)
if err != nil {
@ -634,7 +631,6 @@ func TestRejectUnknownFieldsFlat(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
blob, err := proto.Marshal(tt.in)
if err != nil {

View File

@ -107,9 +107,7 @@ func TestCreateHDPath(t *testing.T) {
{"m/44'/114'/1'/1/0", args{114, 1, 1}, hd.BIP44Params{Purpose: 44, CoinType: 114, Account: 1, AddressIndex: 1}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
tt := tt
require.Equal(t, tt.want, *hd.CreateHDPath(tt.args.coinType, tt.args.account, tt.args.index))
})
}
@ -170,7 +168,6 @@ func TestDeriveHDPathRange(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.path, func(t *testing.T) {
master, ch := hd.ComputeMastersFromSeed(seed)
_, err := hd.DerivePrivateKeyForPath(master, ch, tt.path)
@ -297,7 +294,6 @@ func TestDerivePrivateKeyForPathDoNotCrash(t *testing.T) {
}
for _, path := range paths {
path := path
t.Run(path, func(t *testing.T) {
_, _ = hd.DerivePrivateKeyForPath([32]byte{}, [32]byte{}, path)
})

View File

@ -2003,7 +2003,7 @@ func TestRenameKey(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
kr := newKeyring(t, "testKeyring")
t.Run(tc.name, func(t *testing.T) {
tc.run(kr)

View File

@ -36,7 +36,6 @@ func TestNewSigningAlgoByString(t *testing.T) {
list := SigningAlgoList{hd.Secp256k1}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
algorithm, err := NewSigningAlgoFromString(tt.algoStr, list)
if tt.isSupported {

View File

@ -12,7 +12,7 @@ import (
"github.com/cometbft/cometbft/crypto"
secp256k1dcrd "github.com/decred/dcrd/dcrec/secp256k1/v4"
"gitlab.com/yawning/secp256k1-voi/secec"
"golang.org/x/crypto/ripemd160" //nolint: staticcheck // keep around for backwards compatibility
"golang.org/x/crypto/ripemd160" //nolint:staticcheck,gosec // keep around for backwards compatibility
errorsmod "cosmossdk.io/errors"
@ -173,8 +173,8 @@ func (pubKey *PubKey) Address() crypto.Address {
}
sha := sha256.Sum256(pubKey.Key)
hasherRIPEMD160 := ripemd160.New()
hasherRIPEMD160.Write(sha[:]) // does not error
hasherRIPEMD160 := ripemd160.New() //nolint:gosec // keep around for backwards compatibility
hasherRIPEMD160.Write(sha[:]) // does not error
return crypto.Address(hasherRIPEMD160.Sum(nil))
}

View File

@ -27,7 +27,6 @@ func Test_genPrivKey(t *testing.T) {
{"valid because 0 < 1 < N", validOne, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
if tt.shouldPanic {
require.Panics(t, func() {

View File

@ -255,7 +255,6 @@ func TestGenPrivKeyFromSecret(t *testing.T) {
{"another seed used in cosmos tests #3", []byte("")},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
gotPrivKey := secp256k1.GenPrivKeyFromSecret(tt.secret)
require.NotNil(t, gotPrivKey)

View File

@ -58,7 +58,6 @@ func TestBitArrayEqual(t *testing.T) {
{name: "different should not be equal", b1: big1, b2: big2, eq: false},
}
for _, tc := range cases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
eq := tc.b1.Equal(tc.b2)
require.Equal(t, tc.eq, eq)
@ -102,7 +101,6 @@ func TestJSONMarshalUnmarshal(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.bA.String(), func(t *testing.T) {
bz, err := json.Marshal(tc.bA)
require.NoError(t, err)
@ -162,7 +160,6 @@ func TestCompactMarshalUnmarshal(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.bA.String(), func(t *testing.T) {
bz := tc.bA.CompactMarshal()
@ -209,8 +206,6 @@ func TestCompactBitArrayNumOfTrueBitsBefore(t *testing.T) {
{`"______________xx"`, []int{14, 15}, []int{0, 1}},
}
for tcIndex, tc := range testCases {
tc := tc
tcIndex := tcIndex
t.Run(tc.marshalledBA, func(t *testing.T) {
var bA *CompactBitArray
err := json.Unmarshal([]byte(tc.marshalledBA), &bA)
@ -283,7 +278,6 @@ func TestNewCompactBitArrayCrashWithLimits(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(fmt.Sprintf("%d", tt.in), func(t *testing.T) {
got := NewCompactBitArray(tt.in)
if g := got != nil; g != tt.mustPass {

View File

@ -102,7 +102,7 @@ func TestABCIInfoSerializeErr(t *testing.T) {
},
}
for msg, spec := range specs {
spec := spec
_, _, log := ABCIInfo(spec.src, spec.debug)
if log != spec.exp {
t.Errorf("%s: expected log %s, got %s", msg, spec.exp, log)

View File

@ -90,7 +90,6 @@ func (s *decimalInternalTestSuite) TestDecMarshalJSON() {
{"12340Int", LegacyNewDec(12340), "\"12340.000000000000000000\"", false},
}
for _, tt := range tests {
tt := tt
s.T().Run(tt.name, func(t *testing.T) {
got, err := tt.d.MarshalJSON()
if (err != nil) != tt.wantErr {

View File

@ -261,7 +261,7 @@ func (s *decimalTestSuite) TestArithmetic() {
}
for tcIndex, tc := range tests {
tc := tc
resAdd := tc.d1.Add(tc.d2)
resSub := tc.d1.Sub(tc.d2)
resMul := tc.d1.Mul(tc.d2)
@ -727,7 +727,6 @@ func TestFormatDec(t *testing.T) {
require.NoError(t, err)
for _, tc := range testcases {
tc := tc
t.Run(tc[0], func(t *testing.T) {
out, err := math.FormatDec(tc[0])
require.NoError(t, err)

View File

@ -591,7 +591,6 @@ func TestFormatIntCorrectness(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.in, func(t *testing.T) {
got, err := math.FormatInt(tt.in)
if err != nil {

View File

@ -244,7 +244,7 @@ func (s *uintTestSuite) TestSafeSub() {
}
for i, tc := range testCases {
tc := tc
if tc.panic {
s.Require().Panics(func() { tc.x.Sub(tc.y) })
continue

View File

@ -140,7 +140,6 @@ func TestDurationOutOfRange(t *testing.T) {
},
}
for _, tc := range tt {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
val := protoreflect.ValueOfMessage(tc.dur.ProtoReflect())
@ -272,7 +271,6 @@ func TestDurationCompare(t *testing.T) {
}
for _, tc := range tt {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()

View File

@ -114,7 +114,6 @@ func TestTimestampOutOfRange(t *testing.T) {
},
}
for _, tc := range tt {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
val := protoreflect.ValueOfMessage(tc.ts.ProtoReflect())

View File

@ -45,7 +45,7 @@ type App struct {
ModuleManager *module.Manager
UnorderedTxManager *unorderedtx.Manager
configurator module.Configurator // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
configurator module.Configurator //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
config *runtimev1alpha1.Module
storeKeys []storetypes.StoreKey
interfaceRegistry codectypes.InterfaceRegistry
@ -251,7 +251,7 @@ func (a *App) RegisterNodeService(clientCtx client.Context, cfg config.Config) {
}
// Configurator returns the app's configurator.
func (a *App) Configurator() module.Configurator { // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
func (a *App) Configurator() module.Configurator { //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
return a.configurator
}

View File

@ -39,7 +39,7 @@ type appModule struct {
func (m appModule) IsOnePerModuleType() {}
func (m appModule) IsAppModule() {}
func (m appModule) RegisterServices(configurator module.Configurator) { // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
func (m appModule) RegisterServices(configurator module.Configurator) { //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
err := m.app.registerRuntimeServices(configurator)
if err != nil {
panic(err)

View File

@ -8,7 +8,7 @@ import (
"github.com/cosmos/cosmos-sdk/types/module"
)
func (a *App) registerRuntimeServices(cfg module.Configurator) error { // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
func (a *App) registerRuntimeServices(cfg module.Configurator) error { //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
autocliv1.RegisterQueryServer(cfg.QueryServer(), services.NewAutoCLIQueryService(a.ModuleManager.Modules))
reflectionSvc, err := services.NewReflectionService()

View File

@ -105,7 +105,7 @@ type autocliConfigurator struct {
err error
}
var _ module.Configurator = &autocliConfigurator{} // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
var _ module.Configurator = &autocliConfigurator{} //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
func (a *autocliConfigurator) MsgServer() gogogrpc.Server { return &a.msgServer }

View File

@ -577,7 +577,6 @@ func (m *MM[T]) assertNoForgottenModules(
}
var missing []string
for m := range m.modules {
m := m
if pass != nil && pass(m) {
continue
}

View File

@ -139,7 +139,7 @@ func ListenerMux(listeners ...Listener) Listener {
mux.onBatch = func(batch PacketBatch) error {
for _, listener := range listeners {
err := batch.apply(&listener)
err := batch.apply(&listener) //nolint:gosec // aliasing is safe here
if err != nil {
return err
}

View File

@ -332,6 +332,8 @@ func TestCompareModuleSchemas(t *testing.T) {
}
func requireModuleSchema(t *testing.T, types ...schema.Type) schema.ModuleSchema {
t.Helper()
s, err := schema.CompileModuleSchema(types...)
if err != nil {
t.Fatal(err)

View File

@ -75,7 +75,7 @@ func (c Field) ValidateValue(value interface{}, typeSet TypeSet) error {
}
err := enumType.ValidateValue(value.(string))
if err != nil {
return fmt.Errorf("invalid value for enum field %q: %v", c.Name, err)
return fmt.Errorf("invalid value for enum field %q: %v", c.Name, err) //nolint:errorlint // false positive due to using go1.12
}
default:
}

View File

@ -24,7 +24,7 @@ func TestAppSimulator_mirror(t *testing.T) {
})
}
func testAppSimulatorMirror(t *testing.T, retainDeletes bool) { // nolint: thelper // this isn't a test helper function
func testAppSimulatorMirror(t *testing.T, retainDeletes bool) { //nolint: thelper // this isn't a test helper function
stateSimOpts := statesim.Options{CanRetainDeletions: retainDeletes}
mirror, err := NewSimulator(Options{
StateSimOptions: stateSimOpts,

View File

@ -1,4 +1,4 @@
golangci_version=v1.60.1
golangci_version=v1.61.0
golangci_installed_version=$(shell golangci-lint version --format short 2>/dev/null)
#? setup-pre-commit: Set pre-commit git hook

View File

@ -3,10 +3,9 @@ package gogoreflection
import (
"reflect"
_ "github.com/cosmos/cosmos-proto" // look above
_ "github.com/cosmos/gogoproto/gogoproto" // required so it does register the gogoproto file descriptor
gogoproto "github.com/cosmos/gogoproto/proto"
_ "github.com/cosmos/cosmos-proto" // look above
"github.com/golang/protobuf/proto" //nolint:staticcheck // migrate in a future pr
)
@ -42,12 +41,12 @@ func getExtension(extID int32, m proto.Message) *gogoproto.ExtensionDesc {
for id, desc := range proto.RegisteredExtensions(m) { //nolint:staticcheck // keep for backward compatibility
if id == extID {
return &gogoproto.ExtensionDesc{
ExtendedType: desc.ExtendedType, //nolint:staticcheck // keep for backward compatibility
ExtensionType: desc.ExtensionType, //nolint:staticcheck // keep for backward compatibility
Field: desc.Field, //nolint:staticcheck // keep for backward compatibility
Name: desc.Name, //nolint:staticcheck // keep for backward compatibility
Tag: desc.Tag, //nolint:staticcheck // keep for backward compatibility
Filename: desc.Filename, //nolint:staticcheck // keep for backward compatibility
ExtendedType: desc.ExtendedType,
ExtensionType: desc.ExtensionType,
Field: desc.Field,
Name: desc.Name,
Tag: desc.Tag,
Filename: desc.Filename,
}
}
}

View File

@ -184,7 +184,7 @@ func fqn(prefix, name string) string {
// fileDescForType gets the file descriptor for the given type.
// The given type should be a proto message.
func (s *serverReflectionServer) fileDescForType(st reflect.Type) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(protoMessage)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(protoMessage)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
@ -232,7 +232,7 @@ func typeForName(name string) (reflect.Type, error) {
}
func fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(proto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
@ -247,7 +247,7 @@ func fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescripto
}
func (s *serverReflectionServer) allExtensionNumbersForType(st reflect.Type) ([]int32, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(proto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}

View File

@ -49,8 +49,6 @@ func TestGetPruningOptionsFromFlags(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(j *testing.T) {
viper.Reset()
viper.SetDefault(FlagPruning, pruningtypes.PruningOptionDefault)

View File

@ -832,7 +832,7 @@ func testnetify[T types.Application](ctx *Context, testnetAppCreator types.AppCr
_, context := getCtx(ctx, true)
clientCreator := proxy.NewLocalClientCreator(cmtApp)
metrics := node.DefaultMetricsProvider(cmtcfg.DefaultConfig().Instrumentation)
_, _, _, _, _, proxyMetrics, _, _ := metrics(genDoc.ChainID) // nolint: dogsled // function from comet
_, _, _, _, _, proxyMetrics, _, _ := metrics(genDoc.ChainID) //nolint: dogsled // function from comet
proxyApp := proxy.NewAppConns(clientCreator, proxyMetrics)
if err := proxyApp.Start(); err != nil {
return nil, fmt.Errorf("error starting proxy app connections: %w", err)

View File

@ -3,10 +3,9 @@ package gogoreflection
import (
"reflect"
_ "github.com/cosmos/cosmos-proto" // look above
_ "github.com/cosmos/gogoproto/gogoproto" // required so it does register the gogoproto file descriptor
gogoproto "github.com/cosmos/gogoproto/proto"
_ "github.com/cosmos/cosmos-proto" // look above
"github.com/golang/protobuf/proto" //nolint:staticcheck // migrate in a future pr
)
@ -42,12 +41,12 @@ func getExtension(extID int32, m proto.Message) *gogoproto.ExtensionDesc {
for id, desc := range proto.RegisteredExtensions(m) { //nolint:staticcheck // keep for backward compatibility
if id == extID {
return &gogoproto.ExtensionDesc{
ExtendedType: desc.ExtendedType, //nolint:staticcheck // keep for backward compatibility
ExtensionType: desc.ExtensionType, //nolint:staticcheck // keep for backward compatibility
Field: desc.Field, //nolint:staticcheck // keep for backward compatibility
Name: desc.Name, //nolint:staticcheck // keep for backward compatibility
Tag: desc.Tag, //nolint:staticcheck // keep for backward compatibility
Filename: desc.Filename, //nolint:staticcheck // keep for backward compatibility
ExtendedType: desc.ExtendedType,
ExtensionType: desc.ExtensionType,
Field: desc.Field,
Name: desc.Name,
Tag: desc.Tag,
Filename: desc.Filename,
}
}
}

View File

@ -188,7 +188,7 @@ func fqn(prefix, name string) string {
// fileDescForType gets the file descriptor for the given type.
// The given type should be a proto message.
func (s *serverReflectionServer) fileDescForType(st reflect.Type) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(protoMessage)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(protoMessage)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
@ -236,7 +236,7 @@ func typeForName(name string) (reflect.Type, error) {
}
func fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(gogoproto.Message)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(gogoproto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
@ -251,7 +251,7 @@ func fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescripto
}
func (s *serverReflectionServer) allExtensionNumbersForType(st reflect.Type) ([]int32, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(gogoproto.Message)
m, ok := reflect.Zero(reflect.PointerTo(st)).Interface().(gogoproto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}

View File

@ -17,7 +17,7 @@ import (
// GlobalLabels defines the set of global labels that will be applied to all
// metrics emitted using the telemetry package function wrappers.
var GlobalLabels = []metrics.Label{} // nolint: ignore // false positive
var GlobalLabels = []metrics.Label{} //nolint: ignore // false positive
// NewLabel creates a new instance of Label with name and value
func NewLabel(name, value string) metrics.Label {

View File

@ -334,9 +334,6 @@ func TestConsensus_ExtendVote(t *testing.T) {
Block: &v1.BlockParams{
MaxGas: 5000000,
},
Abci: &v1.ABCIParams{
VoteExtensionsEnableHeight: 2,
},
Feature: &v1.FeatureParams{
VoteExtensionsEnableHeight: &gogotypes.Int64Value{Value: 2},
},
@ -376,9 +373,6 @@ func TestConsensus_VerifyVoteExtension(t *testing.T) {
Block: &v1.BlockParams{
MaxGas: 5000000,
},
Abci: &v1.ABCIParams{
VoteExtensionsEnableHeight: 2,
},
Feature: &v1.FeatureParams{
VoteExtensionsEnableHeight: &gogotypes.Int64Value{Value: 2},
},
@ -537,6 +531,7 @@ func TestConsensus_ProcessProposal_With_Handler(t *testing.T) {
Height: 1,
Txs: [][]byte{mockTx.Bytes(), append(mockTx.Bytes(), []byte("bad")...), mockTx.Bytes(), mockTx.Bytes()},
})
require.NoError(t, err)
require.Equal(t, res.Status, abciproto.PROCESS_PROPOSAL_STATUS_REJECT)
}
@ -642,9 +637,6 @@ func setUpConsensus(t *testing.T, gasLimit uint64, mempool mempool.Mempool[mock.
Block: &v1.BlockParams{
MaxGas: 300000,
},
Abci: &v1.ABCIParams{
VoteExtensionsEnableHeight: 2,
},
Feature: &v1.FeatureParams{
VoteExtensionsEnableHeight: &gogotypes.Int64Value{Value: 2},
},

View File

@ -89,7 +89,6 @@ func (s *Server[T]) Start(ctx context.Context) error {
g, ctx := errgroup.WithContext(ctx)
for _, mod := range s.components {
mod := mod
g.Go(func() error {
return mod.Start(ctx)
})
@ -110,7 +109,6 @@ func (s *Server[T]) Stop(ctx context.Context) error {
g, ctx := errgroup.WithContext(ctx)
for _, mod := range s.components {
mod := mod
g.Go(func() error {
return mod.Stop(ctx)
})
@ -198,7 +196,6 @@ func (s *Server[T]) Init(appI AppI[T], cfg map[string]any, logger log.Logger) er
var components []ServerComponent[T]
for _, mod := range s.components {
mod := mod
if err := mod.Init(appI, cfg, logger); err != nil {
return err
}

View File

@ -183,7 +183,7 @@ type SimApp struct {
sm *module.SimulationManager
// module configurator
configurator module.Configurator // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
configurator module.Configurator //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
}
func init() {
@ -705,7 +705,7 @@ func (app *SimApp) EndBlocker(ctx sdk.Context) (sdk.EndBlock, error) {
return app.ModuleManager.EndBlock(ctx)
}
func (a *SimApp) Configurator() module.Configurator { // nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
func (a *SimApp) Configurator() module.Configurator { //nolint:staticcheck // SA1019: Configurator is deprecated but still used in runtime v1.
return a.configurator
}

View File

@ -81,7 +81,6 @@ func TestSimGenesisAccountValidate(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
require.Equal(t, tc.wantErr, tc.sga.Validate() != nil)
})

View File

@ -81,7 +81,6 @@ func TestSimGenesisAccountValidate(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
require.Equal(t, tc.wantErr, tc.sga.Validate() != nil)
})

View File

@ -60,7 +60,6 @@ func TestFindStartIndex(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
body := tt.sortedL
got := findStartIndex(body, tt.query)
@ -129,7 +128,6 @@ func TestFindEndIndex(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
body := tt.sortedL
got := findEndIndex(body, tt.query)

View File

@ -300,7 +300,7 @@ func (store *Store) dirtyItems(start, end []byte) {
}
n := len(store.unsortedCache)
unsorted := make([]*kv.Pair, 0)
unsorted := make([]*kv.Pair, 0) //nolint:staticcheck // We are in store v1.
// If the unsortedCache is too big, its costs too much to determine
// what's in the subset we are concerned about.
// If you are interleaving iterator calls with writes, this can easily become an
@ -312,7 +312,7 @@ func (store *Store) dirtyItems(start, end []byte) {
// dbm.IsKeyInDomain is nil safe and returns true iff key is greater than start
if dbm.IsKeyInDomain(conv.UnsafeStrToBytes(key), start, end) {
cacheValue := store.cache[key]
unsorted = append(unsorted, &kv.Pair{Key: []byte(key), Value: cacheValue.value})
unsorted = append(unsorted, &kv.Pair{Key: []byte(key), Value: cacheValue.value}) //nolint:staticcheck // We are in store v1.
}
}
store.clearUnsortedCacheSubset(unsorted, stateUnsorted)
@ -355,18 +355,18 @@ func (store *Store) dirtyItems(start, end []byte) {
}
}
kvL := make([]*kv.Pair, 0, 1+endIndex-startIndex)
kvL := make([]*kv.Pair, 0, 1+endIndex-startIndex) //nolint:staticcheck // We are in store v1.
for i := startIndex; i <= endIndex; i++ {
key := strL[i]
cacheValue := store.cache[key]
kvL = append(kvL, &kv.Pair{Key: []byte(key), Value: cacheValue.value})
kvL = append(kvL, &kv.Pair{Key: []byte(key), Value: cacheValue.value}) //nolint:staticcheck // We are in store v1.
}
// kvL was already sorted so pass it in as is.
store.clearUnsortedCacheSubset(kvL, stateAlreadySorted)
}
func (store *Store) clearUnsortedCacheSubset(unsorted []*kv.Pair, sortState sortState) {
func (store *Store) clearUnsortedCacheSubset(unsorted []*kv.Pair, sortState sortState) { //nolint:staticcheck // We are in store v1.
n := len(store.unsortedCache)
if len(unsorted) == n { // This pattern allows the Go compiler to emit the map clearing idiom for the entire map.
for key := range store.unsortedCache {

View File

@ -475,6 +475,10 @@ func doOp(t *testing.T, st types.CacheKVStore, truth corestore.KVStoreWithBatch,
err := truth.Set(keyFmt(k), valFmt(k))
require.NoError(t, err)
case opSetRange:
if len(args) < 2 {
panic("expected 2 args")
}
start := args[0]
end := args[1]
setRange(t, st, truth, start, end)
@ -484,6 +488,10 @@ func doOp(t *testing.T, st types.CacheKVStore, truth corestore.KVStoreWithBatch,
err := truth.Delete(keyFmt(k))
require.NoError(t, err)
case opDelRange:
if len(args) < 2 {
panic("expected 2 args")
}
start := args[0]
end := args[1]
deleteRange(t, st, truth, start, end)

View File

@ -361,8 +361,8 @@ func (st *Store) Query(req *types.RequestQuery) (res *types.ResponseQuery, err e
res.ProofOps = getProofFromTree(mtree, req.Data, res.Value != nil)
case "/subspace":
pairs := kv.Pairs{
Pairs: make([]kv.Pair, 0),
pairs := kv.Pairs{ //nolint:staticcheck // We are in store v1.
Pairs: make([]kv.Pair, 0), //nolint:staticcheck // We are in store v1.
}
subspace := req.Data
@ -370,7 +370,7 @@ func (st *Store) Query(req *types.RequestQuery) (res *types.ResponseQuery, err e
iterator := types.KVStorePrefixIterator(st, subspace)
for ; iterator.Valid(); iterator.Next() {
pairs.Pairs = append(pairs.Pairs, kv.Pair{Key: iterator.Key(), Value: iterator.Value()})
pairs.Pairs = append(pairs.Pairs, kv.Pair{Key: iterator.Key(), Value: iterator.Value()}) //nolint:staticcheck // We are in store v1.
}
if err := iterator.Close(); err != nil {
panic(fmt.Errorf("failed to close iterator: %w", err))

View File

@ -478,15 +478,15 @@ func TestIAVLStoreQuery(t *testing.T) {
v3 := []byte("val3")
ksub := []byte("key")
KVs0 := kv.Pairs{}
KVs1 := kv.Pairs{
Pairs: []kv.Pair{
KVs0 := kv.Pairs{} //nolint:staticcheck // We are in store v1.
KVs1 := kv.Pairs{ //nolint:staticcheck // We are in store v1.
Pairs: []kv.Pair{ //nolint:staticcheck // We are in store v1.
{Key: k1, Value: v1},
{Key: k2, Value: v2},
},
}
KVs2 := kv.Pairs{
Pairs: []kv.Pair{
KVs2 := kv.Pairs{ //nolint:staticcheck // We are in store v1.
Pairs: []kv.Pair{ //nolint:staticcheck // We are in store v1.
{Key: k1, Value: v3},
{Key: k2, Value: v2},
},
@ -639,8 +639,6 @@ func TestSetInitialVersion(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
db := coretesting.NewMemDB()
store := tc.storeFn(db)

View File

@ -14,13 +14,13 @@ import (
// merkleMap defines a merkle-ized tree from a map. Leave values are treated as
// hash(key) | hash(value). Leaves are sorted before Merkle hashing.
type merkleMap struct {
kvs kv.Pairs
kvs kv.Pairs //nolint:staticcheck // We are in store v1.
sorted bool
}
func newMerkleMap() *merkleMap {
return &merkleMap{
kvs: kv.Pairs{},
kvs: kv.Pairs{}, //nolint:staticcheck // We are in store v1.
sorted: false,
}
}
@ -38,7 +38,7 @@ func (sm *merkleMap) set(key string, value []byte) {
// and make a determination to fetch or not.
vhash := sha256.Sum256(value)
sm.kvs.Pairs = append(sm.kvs.Pairs, kv.Pair{
sm.kvs.Pairs = append(sm.kvs.Pairs, kv.Pair{ //nolint:staticcheck // We are in store v1.
Key: byteKey,
Value: vhash[:],
})
@ -61,7 +61,7 @@ func (sm *merkleMap) sort() {
// hashKVPairs hashes a kvPair and creates a merkle tree where the leaves are
// byte slices.
func hashKVPairs(kvs kv.Pairs) []byte {
func hashKVPairs(kvs kv.Pairs) []byte { //nolint:staticcheck // We are in store v1.
kvsH := make([][]byte, len(kvs.Pairs))
for i, kvp := range kvs.Pairs {
kvsH[i] = KVPair(kvp).Bytes()
@ -76,13 +76,13 @@ func hashKVPairs(kvs kv.Pairs) []byte {
// Leaves are `hash(key) | hash(value)`.
// Leaves are sorted before Merkle hashing.
type simpleMap struct {
Kvs kv.Pairs
Kvs kv.Pairs //nolint:staticcheck // We are in store v1.
sorted bool
}
func newSimpleMap() *simpleMap {
return &simpleMap{
Kvs: kv.Pairs{},
Kvs: kv.Pairs{}, //nolint:staticcheck // We are in store v1.
sorted: false,
}
}
@ -99,7 +99,7 @@ func (sm *simpleMap) Set(key string, value []byte) {
// and make a determination to fetch or not.
vhash := sha256.Sum256(value)
sm.Kvs.Pairs = append(sm.Kvs.Pairs, kv.Pair{
sm.Kvs.Pairs = append(sm.Kvs.Pairs, kv.Pair{ //nolint:staticcheck // We are in store v1.
Key: byteKey,
Value: vhash[:],
})
@ -122,10 +122,10 @@ func (sm *simpleMap) Sort() {
// KVPairs returns a copy of sorted KVPairs.
// NOTE these contain the hashed key and value.
func (sm *simpleMap) KVPairs() kv.Pairs {
func (sm *simpleMap) KVPairs() kv.Pairs { //nolint:staticcheck // We are in store v1.
sm.Sort()
kvs := kv.Pairs{
Pairs: make([]kv.Pair, len(sm.Kvs.Pairs)),
kvs := kv.Pairs{ //nolint:staticcheck // We are in store v1.
Pairs: make([]kv.Pair, len(sm.Kvs.Pairs)), //nolint:staticcheck // We are in store v1.
}
copy(kvs.Pairs, sm.Kvs.Pairs)
@ -137,12 +137,12 @@ func (sm *simpleMap) KVPairs() kv.Pairs {
// KVPair is a local extension to KVPair that can be hashed.
// Key and value are length prefixed and concatenated,
// then hashed.
type KVPair kv.Pair
type KVPair kv.Pair //nolint:staticcheck // We are in store v1.
// NewKVPair takes in a key and value and creates a kv.Pair
// wrapped in the local extension KVPair
func NewKVPair(key, value []byte) KVPair {
return KVPair(kv.Pair{
return KVPair(kv.Pair{ //nolint:staticcheck // We are in store v1.
Key: key,
Value: value,
})

View File

@ -19,7 +19,7 @@ func bz(s string) []byte { return []byte(s) }
func keyFmt(i int) []byte { return bz(fmt.Sprintf("key%0.8d", i)) }
func valFmt(i int) []byte { return bz(fmt.Sprintf("value%0.8d", i)) }
var kvPairs = []kv.Pair{
var kvPairs = []kv.Pair{ //nolint:staticcheck // We are in store v1.
{Key: keyFmt(1), Value: valFmt(1)},
{Key: keyFmt(2), Value: valFmt(2)},
{Key: keyFmt(3), Value: valFmt(3)},

View File

@ -75,7 +75,7 @@ func TestStrategies(t *testing.T) {
}
for name, tc := range testcases {
tc := tc // Local copy to avoid shadowing.
// Local copy to avoid shadowing.
t.Run(name, func(t *testing.T) {
t.Parallel()

View File

@ -142,7 +142,6 @@ func TestMultistoreSnapshot_Checksum(t *testing.T) {
}},
}
for _, tc := range testcases {
tc := tc
t.Run(fmt.Sprintf("Format %v", tc.format), func(t *testing.T) {
ch := make(chan io.ReadCloser)
go func() {
@ -177,7 +176,6 @@ func TestMultistoreSnapshot_Errors(t *testing.T) {
"unknown height": {9, nil},
}
for name, tc := range testcases {
tc := tc
t.Run(name, func(t *testing.T) {
err := store.Snapshot(tc.height, nil)
require.Error(t, err)

View File

@ -524,8 +524,6 @@ func TestMultiStore_Pruning(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
db := coretesting.NewMemDB()
ms := newMultiStoreWithMounts(db, tc.po)

View File

@ -21,7 +21,7 @@ func bz(s string) []byte { return []byte(s) }
func keyFmt(i int) []byte { return bz(fmt.Sprintf("key%0.8d", i)) }
func valFmt(i int) []byte { return bz(fmt.Sprintf("value%0.8d", i)) }
var kvPairs = []kv.Pair{
var kvPairs = []kv.Pair{ //nolint:staticcheck // We are in store v1.
{Key: keyFmt(1), Value: valFmt(1)},
{Key: keyFmt(2), Value: valFmt(2)},
{Key: keyFmt(3), Value: valFmt(3)},

View File

@ -47,7 +47,6 @@ func TestGasMeter(t *testing.T) {
used := uint64(0)
for unum, usage := range tc.usage {
usage := usage
used += usage
require.NotPanics(t, func() { meter.ConsumeGas(usage, "") }, "Not exceeded limit but panicked. tc #%d, usage #%d", tcnum, unum)
require.Equal(t, used, meter.GasConsumed(), "Gas consumption not match. tc #%d, usage #%d", tcnum, unum)

View File

@ -84,7 +84,6 @@ func TestPaginatedIterator(t *testing.T) {
reverse: true,
},
} {
tc := tc
t.Run(tc.desc, func(t *testing.T) {
var iter types.Iterator
if tc.reverse {

View File

@ -53,7 +53,6 @@ func TestStoreUpgrades(t *testing.T) {
}
for name, tc := range cases {
tc := tc
t.Run(name, func(t *testing.T) {
for _, r := range tc.expectAdd {
assert.Equal(t, tc.upgrades.IsAdded(r.key), true)

View File

@ -456,7 +456,6 @@ func (s *RootStoreTestSuite) TestPrune() {
}
for _, tc := range testCases {
tc := tc
s.newStoreWithPruneConfig(&tc.po)

View File

@ -357,7 +357,6 @@ func (s *E2ETestSuite) TestCLIQueryTxCmdByHash() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxCmd()
clientCtx := val.GetClientCtx()
@ -490,7 +489,6 @@ func (s *E2ETestSuite) TestCLIQueryTxCmdByEvents() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxCmd()
clientCtx := val.GetClientCtx()
@ -570,7 +568,6 @@ func (s *E2ETestSuite) TestCLIQueryTxsCmdByEvents() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxsByEventsCmd()
clientCtx := val.GetClientCtx()
@ -1503,7 +1500,6 @@ func (s *E2ETestSuite) TestAuxSigner() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
_, err := govtestutil.MsgSubmitLegacyProposal(
val.GetClientCtx(),
@ -1730,7 +1726,6 @@ func (s *E2ETestSuite) TestAuxToFeeWithTips() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := govtestutil.MsgSubmitLegacyProposal(
val.GetClientCtx(),

View File

@ -63,7 +63,6 @@ func (s *E2ETestSuite) TestQueryGrantGRPC() {
},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
resp, _ := testutil.GetRequest(tc.url)
require := s.Require()
@ -151,7 +150,6 @@ func (s *E2ETestSuite) TestQueryGrantsGRPC() {
},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
tc.preRun()
resp, err := testutil.GetRequest(tc.url)

View File

@ -305,7 +305,6 @@ func (s *E2ETestSuite) TestNewExecGenericAuthorized() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()
@ -415,7 +414,6 @@ func (s *E2ETestSuite) TestNewExecGrantAuthorized() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()
@ -619,7 +617,6 @@ func (s *E2ETestSuite) TestExecDelegateAuthorization() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()
@ -686,7 +683,6 @@ func (s *E2ETestSuite) TestExecDelegateAuthorization() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()
@ -841,7 +837,6 @@ func (s *E2ETestSuite) TestExecUndelegateAuthorization() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()
@ -909,7 +904,6 @@ func (s *E2ETestSuite) TestExecUndelegateAuthorization() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdExecAuthorization()
clientCtx := val.GetClientCtx()

View File

@ -154,7 +154,6 @@ func (s *E2ETestSuite) TestLatestValidatorSet_GRPC() {
{"with pagination", &cmtservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{Offset: 0, Limit: uint64(len(vals))}}, false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
grpcRes, err := s.queryClient.GetLatestValidatorSet(context.Background(), tc.req)
if tc.expErr {
@ -185,7 +184,6 @@ func (s *E2ETestSuite) TestLatestValidatorSet_GRPCGateway() {
{"with pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/latest?pagination.offset=0&pagination.limit=2", vals[0].GetAPIAddress()), false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := testutil.GetRequest(tc.url)
s.Require().NoError(err)
@ -218,7 +216,6 @@ func (s *E2ETestSuite) TestValidatorSetByHeight_GRPC() {
{"with pagination", &cmtservice.GetValidatorSetByHeightRequest{Height: 1, Pagination: &qtypes.PageRequest{Offset: 0, Limit: 1}}, false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
grpcRes, err := s.queryClient.GetValidatorSetByHeight(context.Background(), tc.req)
if tc.expErr {
@ -247,7 +244,6 @@ func (s *E2ETestSuite) TestValidatorSetByHeight_GRPCGateway() {
{"with pagination", fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validatorsets/%d?pagination.offset=0&pagination.limit=2", vals[0].GetAPIAddress(), 1), false, ""},
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := testutil.GetRequest(tc.url)
s.Require().NoError(err)
@ -326,8 +322,6 @@ func (s *E2ETestSuite) TestABCIQuery() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := s.queryClient.ABCIQuery(context.Background(), tc.req)
if tc.expectErr {

View File

@ -64,7 +64,7 @@ func (s *GRPCQueryTestSuite) TestQueryParamsGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequest(tc.url)
s.Run(tc.name, func() {
s.Require().NoError(err)
@ -99,7 +99,7 @@ func (s *GRPCQueryTestSuite) TestQueryValidatorDistributionInfoGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequest(tc.url)
s.Run(tc.name, func() {
if tc.expErr {
@ -152,7 +152,7 @@ func (s *GRPCQueryTestSuite) TestQueryOutstandingRewardsGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequestWithHeaders(tc.url, tc.headers)
s.Run(tc.name, func() {
if tc.expErr {
@ -206,7 +206,7 @@ func (s *GRPCQueryTestSuite) TestQueryValidatorCommissionGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequestWithHeaders(tc.url, tc.headers)
s.Run(tc.name, func() {
if tc.expErr {
@ -264,7 +264,7 @@ func (s *GRPCQueryTestSuite) TestQuerySlashesGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequest(tc.url)
s.Run(tc.name, func() {
@ -340,7 +340,7 @@ func (s *GRPCQueryTestSuite) TestQueryDelegatorRewardsGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequestWithHeaders(tc.url, tc.headers)
s.Run(tc.name, func() {
@ -392,7 +392,7 @@ func (s *GRPCQueryTestSuite) TestQueryDelegatorValidatorsGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequest(tc.url)
s.Run(tc.name, func() {
@ -444,7 +444,7 @@ func (s *GRPCQueryTestSuite) TestQueryWithdrawAddressGRPC() {
}
for _, tc := range testCases {
tc := tc
resp, err := sdktestutil.GetRequest(tc.url)
s.Run(tc.name, func() {

View File

@ -164,8 +164,6 @@ func (s *E2ETestSuite) TestNewCmdSubmitProposal() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdSubmitProposal()
clientCtx := val.GetClientCtx()
@ -260,8 +258,6 @@ func (s *E2ETestSuite) TestNewCmdSubmitLegacyProposal() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdSubmitLegacyProposal()
clientCtx := val.GetClientCtx()
@ -357,7 +353,6 @@ func (s *E2ETestSuite) TestNewCmdWeightedVote() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewCmdWeightedVote()
clientCtx := val.GetClientCtx()

View File

@ -175,7 +175,6 @@ func (s *E2ETestSuite) TestSimulateTx_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
// Broadcast the tx via gRPC via the validator's clientCtx (which goes
// through Tendermint).
@ -506,7 +505,6 @@ func (s *E2ETestSuite) TestBroadcastTx_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
// Broadcast the tx via gRPC via the validator's clientCtx (which goes
// through Tendermint).
@ -772,7 +770,6 @@ func (s *E2ETestSuite) TestTxEncode_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := s.queryClient.TxEncode(context.Background(), tc.req)
if tc.expErr {
@ -852,7 +849,6 @@ func (s *E2ETestSuite) TestTxDecode_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := s.queryClient.TxDecode(context.Background(), tc.req)
if tc.expErr {
@ -963,7 +959,6 @@ func (s *E2ETestSuite) TestTxEncodeAmino_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := s.queryClient.TxEncodeAmino(context.Background(), tc.req)
if tc.expErr {
@ -1049,7 +1044,6 @@ func (s *E2ETestSuite) TestTxDecodeAmino_GRPC() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := s.queryClient.TxDecodeAmino(context.Background(), tc.req)
if tc.expErr {

View File

@ -274,7 +274,6 @@ func (s *CLITestSuite) TestCLIQueryTxCmdByHash() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxCmd()
cmd.SetArgs(tc.args)
@ -340,7 +339,6 @@ func (s *CLITestSuite) TestCLIQueryTxCmdByEvents() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxCmd()
cmd.SetArgs(tc.args)
@ -383,7 +381,6 @@ func (s *CLITestSuite) TestCLIQueryTxsCmdByEvents() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := authcli.QueryTxsByEventsCmd()
@ -1019,7 +1016,6 @@ func (s *CLITestSuite) TestAuxSigner() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
_, err := govtestutil.MsgSubmitLegacyProposal(
s.clientCtx,
@ -1238,7 +1234,6 @@ func (s *CLITestSuite) TestAuxToFeeWithTips() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
res, err := govtestutil.MsgSubmitLegacyProposal(
s.clientCtx,

View File

@ -250,7 +250,6 @@ func TestAsyncExec(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.req,

View File

@ -111,8 +111,6 @@ func (s *CLITestSuite) TestTxWithdrawAllRewardsCmd() {
}
for _, tc := range testCases {
tc := tc
s.Run(tc.name, func() {
cmd := cli.NewWithdrawAllRewardsCmd()

View File

@ -331,7 +331,6 @@ func TestMsgWithdrawDelegatorReward(t *testing.T) {
height := f.app.LastBlockHeight()
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,
@ -469,7 +468,6 @@ func TestMsgSetWithdrawAddress(t *testing.T) {
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
tc.preRun()
res, err := f.app.RunMsg(
@ -566,7 +564,6 @@ func TestMsgWithdrawValidatorCommission(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,
@ -600,7 +597,6 @@ func TestMsgWithdrawValidatorCommission(t *testing.T) {
}, remainder.Commission)
}
})
}
}
@ -666,7 +662,6 @@ func TestMsgFundCommunityPool(t *testing.T) {
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,
@ -808,7 +803,6 @@ func TestMsgUpdateParams(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,
@ -891,7 +885,6 @@ func TestMsgCommunityPoolSpend(t *testing.T) {
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,
@ -992,7 +985,6 @@ func TestMsgDepositValidatorRewardsPool(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
res, err := f.app.RunMsg(
tc.msg,

View File

@ -72,7 +72,6 @@ func TestAddGenesisAccountCmd(t *testing.T) {
}
for _, tc := range tests {
tc := tc
t.Run(tc.name, func(t *testing.T) {
home := t.TempDir()
logger := log.NewNopLogger()
@ -217,7 +216,6 @@ func TestBulkAddGenesisAccountCmd(t *testing.T) {
}
for _, tc := range tests {
tc := tc
t.Run(tc.name, func(t *testing.T) {
home := t.TempDir()
logger := log.NewNopLogger()

View File

@ -61,7 +61,6 @@ func TestInitCmd(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
home := t.TempDir()
logger := log.NewNopLogger()

View File

@ -163,8 +163,6 @@ func TestCancelUnbondingDelegation(t *testing.T) {
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
_, err := msgServer.CancelUnbondingDelegation(ctx, &tc.req)
if tc.exceptErr {

View File

@ -51,7 +51,6 @@ func TestGetSimulationLog(t *testing.T) {
}
for _, tt := range tests {
tt := tt
t.Run(tt.store, func(t *testing.T) {
require.Equal(t, tt.expectedLog, GetSimulationLog(tt.store, decoders, tt.kvPairs, tt.kvPairs), tt.store)
})

View File

@ -74,7 +74,6 @@ func PlanBuilder(from *tomledit.Document, to, planType string, loadFn loadDestCo
diffs := DiffKeys(from, target)
for _, diff := range diffs {
diff := diff
kv := diff.KV
var step transform.Step

View File

@ -765,8 +765,6 @@ func (s *argsTestSuite) TestGetConfigFromEnv() {
}
for _, tc := range tests {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
s.setEnv(t, &tc.envVals)
cfg, err := GetConfigFromEnv(false)

View File

@ -355,8 +355,6 @@ func (s *InitTestSuite) TestInitializeCosmovisorNegativeValidation() {
}
for _, tc := range tests {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
s.setEnv(t, &tc.env)
buffer, logger := s.NewCapturingLogger()

View File

@ -422,7 +422,6 @@ func (s *addressTestSuite) TestBech32ifyAddressBytes() {
{"20-byte address", args{"prefixb", addr20byte}, "prefixb1qqqsyqcyq5rqwzqfpg9scrgwpugpzysnrujsuw", false},
}
for _, tt := range tests {
tt := tt
s.T().Run(tt.name, func(t *testing.T) {
got, err := types.Bech32ifyAddressBytes(tt.args.prefix, tt.args.bs)
if (err != nil) != tt.wantErr {
@ -453,7 +452,6 @@ func (s *addressTestSuite) TestMustBech32ifyAddressBytes() {
{"20-byte address", args{"prefixb", addr20byte}, "prefixb1qqqsyqcyq5rqwzqfpg9scrgwpugpzysnrujsuw", false},
}
for _, tt := range tests {
tt := tt
s.T().Run(tt.name, func(t *testing.T) {
if tt.wantPanic {
require.Panics(t, func() { types.MustBech32ifyAddressBytes(tt.args.prefix, tt.args.bs) })

View File

@ -432,7 +432,6 @@ func (coins Coins) SafeMulInt(x math.Int) (Coins, bool) {
res := make(Coins, len(coins))
for i, coin := range coins {
coin := coin
res[i] = NewCoin(coin.Denom, coin.Amount.Mul(x))
}
@ -466,7 +465,6 @@ func (coins Coins) SafeQuoInt(x math.Int) (Coins, bool) {
var res Coins
for _, coin := range coins {
coin := coin
res = append(res, NewCoin(coin.Denom, coin.Amount.Quo(x)))
}

View File

@ -178,7 +178,6 @@ func (s *coinTestSuite) TestAddCoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.shouldPanic {
s.Require().Panics(func() { tc.inputOne.Add(tc.inputTwo) })
} else {
@ -218,7 +217,6 @@ func (s *coinTestSuite) TestSubCoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.shouldPanic {
s.Require().Panics(func() { tc.inputOne.Sub(tc.inputTwo) })
} else {
@ -274,7 +272,6 @@ func (s *coinTestSuite) TestMulIntCoins() {
assert := s.Assert()
for i, tc := range testCases {
tc := tc
if tc.shouldPanic {
assert.Panics(func() { tc.input.MulInt(tc.multiplier) })
} else {
@ -301,7 +298,6 @@ func (s *coinTestSuite) TestQuoIntCoins() {
assert := s.Assert()
for i, tc := range testCases {
tc := tc
if tc.shouldPanic {
assert.Panics(func() { tc.input.QuoInt(tc.divisor) })
} else {
@ -326,7 +322,6 @@ func (s *coinTestSuite) TestIsGTCoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.panics {
s.Require().Panics(func() { tc.inputOne.IsGT(tc.inputTwo) })
} else {
@ -350,7 +345,6 @@ func (s *coinTestSuite) TestIsGTECoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.panics {
s.Require().Panics(func() { tc.inputOne.IsGTE(tc.inputTwo) })
} else {
@ -374,7 +368,6 @@ func (s *coinTestSuite) TestIsLTECoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.panics {
s.Require().Panics(func() { tc.inputOne.IsLTE(tc.inputTwo) })
} else {
@ -400,7 +393,6 @@ func (s *coinTestSuite) TestIsLTCoin() {
}
for tcIndex, tc := range cases {
tc := tc
if tc.panics {
s.Require().Panics(func() { tc.inputOne.IsLT(tc.inputTwo) })
} else {
@ -683,7 +675,6 @@ func (s *coinTestSuite) TestSubCoins() {
assert := s.Assert()
for i, tc := range testCases {
tc := tc
if tc.shouldPanic {
assert.Panics(func() { tc.inputOne.Sub(tc.inputTwo...) })
} else {
@ -708,7 +699,7 @@ func (s *coinTestSuite) TestSafeSubCoin() {
}
for _, tc := range cases {
tc := tc
res, err := tc.inputOne.SafeSub(tc.inputTwo)
if err != nil {
s.Require().Contains(err.Error(), tc.expErrMsg)
@ -1373,7 +1364,7 @@ func (s *coinTestSuite) TestCoinValidate() {
}
for _, tc := range testCases {
tc := tc
t := s.T()
t.Run(tc.name, func(t *testing.T) {
err := tc.coin.Validate()

View File

@ -203,7 +203,6 @@ func (s *contextTestSuite) TestContextHeaderClone() {
}
for name, tc := range cases {
tc := tc
s.T().Run(name, func(t *testing.T) {
ctx := types.NewContext(nil, false, nil).WithBlockHeader(tc.h)
s.Require().Equal(tc.h.Height, ctx.BlockHeight())

View File

@ -211,7 +211,6 @@ func (s *decCoinTestSuite) TestIsValid() {
}
for _, tc := range tests {
tc := tc
if tc.expectPass {
s.Require().True(tc.coin.IsValid(), tc.msg)
} else {
@ -246,7 +245,6 @@ func (s *decCoinTestSuite) TestSubDecCoin() {
decCoin := sdk.NewDecCoin("mytoken", math.NewInt(10))
for _, tc := range tests {
tc := tc
if tc.expectPass {
equal := tc.coin.Sub(decCoin)
s.Require().Equal(equal, decCoin, tc.msg)
@ -282,7 +280,6 @@ func (s *decCoinTestSuite) TestSubDecCoins() {
decCoins := sdk.NewDecCoinsFromCoins(sdk.NewCoin("btc", math.NewInt(10)), sdk.NewCoin("eth", math.NewInt(15)), sdk.NewCoin("mytoken", math.NewInt(5)))
for _, tc := range tests {
tc := tc
if tc.expectPass {
equal := tc.coins.Sub(decCoins)
s.Require().Equal(equal, decCoins, tc.msg)
@ -527,7 +524,6 @@ func (s *decCoinTestSuite) TestDecCoinsQuoDecTruncate() {
}
for i, tc := range testCases {
tc := tc
if tc.panics {
s.Require().Panics(func() { tc.coins.QuoDecTruncate(tc.input) })
} else {
@ -564,7 +560,6 @@ func (s *decCoinTestSuite) TestNewDecCoinsWithIsValid() {
}
for _, tc := range tests {
tc := tc
if tc.expectPass {
s.Require().True(tc.coin.IsValid(), tc.msg)
} else {
@ -591,7 +586,6 @@ func (s *decCoinTestSuite) TestNewDecCoinsWithZeroCoins() {
}
for _, tc := range tests {
tc := tc
s.Require().Equal(sdk.NewDecCoinsFromCoins(tc.coins...).Len(), tc.expectLength)
}
}
@ -623,7 +617,6 @@ func (s *decCoinTestSuite) TestDecCoins_AddDecCoinWithIsValid() {
}
for _, tc := range tests {
tc := tc
if tc.expectPass {
s.Require().True(tc.coin.IsValid(), tc.msg)
} else {
@ -679,7 +672,6 @@ func (s *decCoinTestSuite) TestDecCoins_GetDenomByIndex() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.expectedErr {
s.Require().Panics(func() { tc.input.GetDenomByIndex(tc.index) }, "Test should have panicked")
@ -721,7 +713,6 @@ func (s *decCoinTestSuite) TestDecCoins_IsAllPositive() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.expectedResult {
s.Require().True(tc.input.IsAllPositive(), "Test case #%d: %s", i, tc.name)
@ -791,7 +782,6 @@ func (s *decCoinTestSuite) TestDecCoin_IsGTE() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.expectedPanic {
s.Require().Panics(func() { tc.coin.IsGTE(tc.otherCoin) }, "Test case #%d: %s", i, tc.name)
@ -835,7 +825,6 @@ func (s *decCoinTestSuite) TestDecCoins_IsZero() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.expectedResult {
s.Require().True(tc.coins.IsZero(), "Test case #%d: %s", i, tc.name)
@ -890,7 +879,6 @@ func (s *decCoinTestSuite) TestDecCoins_MulDec() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
res := tc.coins.MulDec(tc.multiplier)
s.Require().Equal(tc.expectedResult, res, "Test case #%d: %s", i, tc.name)
@ -939,7 +927,6 @@ func (s *decCoinTestSuite) TestDecCoins_MulDecTruncate() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.expectedPanic {
s.Require().Panics(func() { tc.coins.MulDecTruncate(tc.multiplier) }, "Test case #%d: %s", i, tc.name)
@ -992,7 +979,6 @@ func (s *decCoinTestSuite) TestDecCoins_QuoDec() {
}
for i, tc := range testCases {
tc := tc
s.T().Run(tc.name, func(t *testing.T) {
if tc.panics {
s.Require().Panics(func() { tc.coins.QuoDec(tc.input) }, "Test case #%d: %s", i, tc.name)

View File

@ -268,7 +268,6 @@ func (s *eventsTestSuite) TestMarkEventsToIndex() {
}
for name, tc := range testCases {
tc := tc
s.T().Run(name, func(_ *testing.T) {
s.Require().Equal(tc.expected, sdk.MarkEventsToIndex(tc.events, tc.indexSet))
})

View File

@ -594,7 +594,6 @@ func (m *Manager) assertNoForgottenModules(setOrderFnName string, moduleNames []
}
var missing []string
for m := range m.Modules {
m := m
if pass != nil && pass(m) {
continue
}
@ -834,7 +833,6 @@ func (m *Manager) GetVersionMap() appmodule.VersionMap {
if v, ok := v.(appmodule.HasConsensusVersion); ok {
version = v.ConsensusVersion()
}
name := name
vermap[name] = version
}

View File

@ -140,7 +140,6 @@ func TestCollectionPagination(t *testing.T) {
}
for name, tc := range tcs {
tc := tc
t.Run(name, func(t *testing.T) {
gotResults, gotResponse, err := CollectionFilteredPaginate(
ctx,

View File

@ -24,7 +24,6 @@ func TestRandomAccounts(t *testing.T) {
{"100-accounts", 100, 100},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
got := simulation.RandomAccounts(r, tt.n)
require.Equal(t, tt.want, len(got))
@ -66,8 +65,6 @@ func TestRandomFees(t *testing.T) {
{"1 coin with 0 amount", sdk.Coins{sdk.NewInt64Coin("ccc", 0)}, true, true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
got, err := simulation.RandomFees(r, tt.spendableCoins)
if (err != nil) != tt.wantErr {

View File

@ -28,7 +28,6 @@ func TestRandSubsetCoins(t *testing.T) {
{"too small amount", rand.New(rand.NewSource(99)), sdk.Coins{sdk.Coin{Denom: "aaa", Amount: math.NewInt(0)}}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
got := simulation.RandSubsetCoins(tt.r, tt.coins)
gotStringRep := got.String()

View File

@ -33,7 +33,6 @@ func TestSignDocDirectAux(t *testing.T) {
}
for _, tc := range testcases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
err := tc.sd.ValidateBasic()
@ -68,7 +67,6 @@ func TestAuxSignerData(t *testing.T) {
}
for _, tc := range testcases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
err := tc.sd.ValidateBasic()

Some files were not shown because too many files have changed in this diff Show More