refactor(x/group)!: use router service (#19638)

parent 3cbdf5ac13
commit 3e63309220
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"reflect"
+	"strings"

 	"github.com/cosmos/gogoproto/proto"
 	protov2 "google.golang.org/protobuf/proto"
@@ -59,6 +60,8 @@ func (m *msgRouterService) CanInvoke(ctx context.Context, typeURL string) error
 		return fmt.Errorf("missing type url")
 	}

+	typeURL = strings.TrimPrefix(typeURL, "/")
+
 	handler := m.router.HybridHandlerByMsgName(typeURL)
 	if handler == nil {
 		return fmt.Errorf("unknown message: %s", typeURL)
@@ -114,6 +117,8 @@ func (m *queryRouterService) CanInvoke(ctx context.Context, typeURL string) erro
 		return fmt.Errorf("missing type url")
 	}

+	typeURL = strings.TrimPrefix(typeURL, "/")
+
 	handlers := m.router.HybridHandlerByRequestName(typeURL)
 	if len(handlers) == 0 {
 		return fmt.Errorf("unknown request: %s", typeURL)
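For context, here is a minimal sketch of calling these router services from a module through `appmodule.Environment` (not part of the diff): the `checkRoutable` helper and the `QueryRouterService()` accessor name are assumptions, while `MessageRouterService()` and `CanInvoke` appear elsewhere in this commit.

```go
package example // illustrative only, not part of this commit

import (
	"context"

	"cosmossdk.io/core/appmodule"
)

// checkRoutable verifies that a message type and a query type can be routed
// before a module relies on them. CanInvoke tolerates a leading "/" because
// the router services above trim it.
func checkRoutable(ctx context.Context, env appmodule.Environment, msgTypeURL, queryTypeURL string) error {
	if err := env.RouterService.MessageRouterService().CanInvoke(ctx, msgTypeURL); err != nil {
		return err
	}
	return env.RouterService.QueryRouterService().CanInvoke(ctx, queryTypeURL)
}
```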
@@ -359,7 +359,7 @@ func NewSimApp(
 	config.MaxProposalTitleLen = 255 // example max title length in characters
 	config.MaxProposalSummaryLen = 10200 // example max summary length in characters
 	*/
-	app.GroupKeeper = groupkeeper.NewKeeper(runtime.NewEnvironment(runtime.NewKVStoreService(keys[group.StoreKey]), logger), appCodec, app.MsgServiceRouter(), app.AuthKeeper, groupConfig)
+	app.GroupKeeper = groupkeeper.NewKeeper(runtime.NewEnvironment(runtime.NewKVStoreService(keys[group.StoreKey]), logger, runtime.EnvWithRouterService(app.GRPCQueryRouter(), app.MsgServiceRouter())), appCodec, app.AuthKeeper, groupConfig)

 	// get skipUpgradeHeights from the app options
 	skipUpgradeHeights := map[int64]bool{}
@@ -30,9 +30,8 @@ Ref: https://keepachangelog.com/en/1.0.0/
 * [#18448](https://github.com/cosmos/cosmos-sdk/pull/18448) Extend group config
 * [#18286](https://github.com/cosmos/cosmos-sdk/pull/18286) Move prefix store creation down after error checks.

 ### Features

 ### API Breaking Changes

+* [#19638](https://github.com/cosmos/cosmos-sdk/pull/19638) Migrate module to use `appmodule.Environment` router service so no `baseapp.MessageRouter` is required in `NewKeeper` anymore.
 * [#19489](https://github.com/cosmos/cosmos-sdk/pull/19489) `appmodule.Environment` is received on the Keeper to get access to different application services.
 * [#19410](https://github.com/cosmos/cosmos-sdk/pull/19410) Migrate to Store Service.

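To make the API break above concrete, here is a minimal wiring sketch mirroring the simapp change in this commit. The `newGroupKeeper` helper and its parameters are illustrative, and the import paths assume the SDK layout at the time of this PR.

```go
package example // illustrative only, not part of this commit

import (
	"cosmossdk.io/log"
	storetypes "cosmossdk.io/store/types"
	"cosmossdk.io/x/group"
	groupkeeper "cosmossdk.io/x/group/keeper"

	"github.com/cosmos/cosmos-sdk/baseapp"
	"github.com/cosmos/cosmos-sdk/codec"
	"github.com/cosmos/cosmos-sdk/runtime"
)

// newGroupKeeper wires the keeper after #19638: the gRPC query router and the
// message router are handed to the environment, not to NewKeeper.
func newGroupKeeper(
	cdc codec.Codec,
	key *storetypes.KVStoreKey,
	logger log.Logger,
	app *baseapp.BaseApp,
	ak group.AccountKeeper,
) groupkeeper.Keeper {
	env := runtime.NewEnvironment(
		runtime.NewKVStoreService(key),
		logger,
		runtime.EnvWithRouterService(app.GRPCQueryRouter(), app.MsgServiceRouter()),
	)
	return groupkeeper.NewKeeper(env, cdc, ak, group.DefaultConfig())
}
```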
@@ -53,7 +53,6 @@ func (s *GenesisTestSuite) SetupTest() {
 	storeService := runtime.NewKVStoreService(key)
 	testCtx := testutil.DefaultContextWithDB(s.T(), key, storetypes.NewTransientStoreKey("transient_test"))
 	encCfg := moduletestutil.MakeTestEncodingConfig(module.AppModule{})
-	env := runtime.NewEnvironment(storeService, log.NewNopLogger())

 	ctrl := gomock.NewController(s.T())
 	accountKeeper := grouptestutil.NewMockAccountKeeper(ctrl)
@@ -74,7 +73,8 @@ func (s *GenesisTestSuite) SetupTest() {
 	s.cdc = codec.NewProtoCodec(encCfg.InterfaceRegistry)
 	s.ctx = s.sdkCtx

-	s.keeper = keeper.NewKeeper(env, s.cdc, bApp.MsgServiceRouter(), accountKeeper, group.DefaultConfig())
+	env := runtime.NewEnvironment(storeService, log.NewNopLogger(), runtime.EnvWithRouterService(bApp.GRPCQueryRouter(), bApp.MsgServiceRouter()))
+	s.keeper = keeper.NewKeeper(env, s.cdc, accountKeeper, group.DefaultConfig())
 }

 func (s *GenesisTestSuite) TestInitExportGenesis() {
@@ -43,7 +43,6 @@ func initKeeper(t *testing.T) *fixture {
 	)

 	key := storetypes.NewKVStoreKey(group.StoreKey)
-	storeService := runtime.NewKVStoreService(key)
 	testCtx := testutil.DefaultContextWithDB(t, key, storetypes.NewTransientStoreKey("transient_test"))
 	encCfg := moduletestutil.MakeTestEncodingConfig(module.AppModule{})

@@ -69,9 +68,9 @@ func initKeeper(t *testing.T) *fixture {
 	accountKeeper.EXPECT().NewAccount(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
 	accountKeeper.EXPECT().SetAccount(gomock.Any(), gomock.Any()).AnyTimes()

-	env := runtime.NewEnvironment(storeService, log.NewNopLogger())
+	env := runtime.NewEnvironment(runtime.NewKVStoreService(key), log.NewNopLogger(), runtime.EnvWithRouterService(bApp.GRPCQueryRouter(), bApp.MsgServiceRouter()))

-	groupKeeper = groupkeeper.NewKeeper(env, encCfg.Codec, bApp.MsgServiceRouter(), accountKeeper, group.DefaultConfig())
+	groupKeeper = groupkeeper.NewKeeper(env, encCfg.Codec, accountKeeper, group.DefaultConfig())
 	queryHelper := baseapp.NewQueryServerTestHelper(ctx, interfaceRegistry)
 	group.RegisterQueryServer(queryHelper, groupKeeper)
 	queryClient := group.NewQueryClient(queryHelper)
@@ -12,7 +12,6 @@ import (
 	"cosmossdk.io/x/group/errors"
 	"cosmossdk.io/x/group/internal/orm"

-	"github.com/cosmos/cosmos-sdk/baseapp"
 	"github.com/cosmos/cosmos-sdk/codec"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 )
@@ -75,18 +74,15 @@ type Keeper struct {
 	voteByProposalIndex orm.Index
 	voteByVoterIndex    orm.Index

-	router baseapp.MessageRouter
-
 	config group.Config

 	cdc codec.Codec
 }

 // NewKeeper creates a new group keeper.
-func NewKeeper(env appmodule.Environment, cdc codec.Codec, router baseapp.MessageRouter, accKeeper group.AccountKeeper, config group.Config) Keeper {
+func NewKeeper(env appmodule.Environment, cdc codec.Codec, accKeeper group.AccountKeeper, config group.Config) Keeper {
 	k := Keeper{
 		environment: env,
-		router:      router,
 		accKeeper:   accKeeper,
 		cdc:         cdc,
 	}
@@ -52,14 +52,11 @@ type TestSuite struct {
 func (s *TestSuite) SetupTest() {
 	s.blockTime = time.Now().Round(0).UTC()
 	key := storetypes.NewKVStoreKey(group.StoreKey)
-	storeService := runtime.NewKVStoreService(key)

 	testCtx := testutil.DefaultContextWithDB(s.T(), key, storetypes.NewTransientStoreKey("transient_test"))
 	encCfg := moduletestutil.MakeTestEncodingConfig(module.AppModule{}, bank.AppModule{})
 	s.addrs = simtestutil.CreateIncrementalAccounts(6)

-	env := runtime.NewEnvironment(storeService, log.NewNopLogger())
-
 	// setup gomock and initialize some globally expected executions
 	ctrl := gomock.NewController(s.T())
 	s.accountKeeper = grouptestutil.NewMockAccountKeeper(ctrl)
@@ -79,8 +76,9 @@ func (s *TestSuite) SetupTest() {
 	bApp.SetInterfaceRegistry(encCfg.InterfaceRegistry)
 	banktypes.RegisterMsgServer(bApp.MsgServiceRouter(), s.bankKeeper)

+	env := runtime.NewEnvironment(runtime.NewKVStoreService(key), log.NewNopLogger(), runtime.EnvWithRouterService(bApp.GRPCQueryRouter(), bApp.MsgServiceRouter()))
 	config := group.DefaultConfig()
-	s.groupKeeper = keeper.NewKeeper(env, encCfg.Codec, bApp.MsgServiceRouter(), s.accountKeeper, config)
+	s.groupKeeper = keeper.NewKeeper(env, encCfg.Codec, s.accountKeeper, config)
 	s.ctx = testCtx.Ctx.WithHeaderInfo(header.Info{Time: s.blockTime})
 	s.sdkCtx = sdk.UnwrapSDKContext(s.ctx)

@@ -850,27 +850,21 @@ func (k Keeper) Exec(goCtx context.Context, msg *group.MsgExec) (*group.MsgExecR
 	// Execute proposal payload.
 	var logs string
 	if proposal.Status == group.PROPOSAL_STATUS_ACCEPTED && proposal.ExecutorResult != group.PROPOSAL_EXECUTOR_RESULT_SUCCESS {
 		// Caching context so that we don't update the store in case of failure.
-		cacheCtx, flush := ctx.CacheContext()
-
 		addr, err := k.accKeeper.AddressCodec().StringToBytes(policyInfo.Address)
 		if err != nil {
 			return nil, err
 		}

 		decisionPolicy := policyInfo.DecisionPolicy.GetCachedValue().(group.DecisionPolicy)
-		if results, err := k.doExecuteMsgs(cacheCtx, k.router, proposal, addr, decisionPolicy); err != nil {
+		if err := k.environment.BranchService.Execute(ctx, func(ctx context.Context) error {
+			return k.doExecuteMsgs(ctx, proposal, addr, decisionPolicy)
+		}); err != nil {
 			proposal.ExecutorResult = group.PROPOSAL_EXECUTOR_RESULT_FAILURE
 			logs = fmt.Sprintf("proposal execution failed on proposal %d, because of error %s", proposal.Id, err.Error())
 			k.Logger().Info("proposal execution failed", "cause", err, "proposalID", proposal.Id)
 		} else {
 			proposal.ExecutorResult = group.PROPOSAL_EXECUTOR_RESULT_SUCCESS
-			flush()
-
-			for _, res := range results {
-				// NOTE: The sdk msg handler creates a new EventManager, so events must be correctly propagated back to the current context
-				ctx.EventManager().EmitEvents(res.GetEvents())
-			}
 		}
 	}

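The hunk above replaces the `CacheContext()`/`flush()` pattern with the environment's branch service. A small sketch of that pattern in isolation follows; the `runBranched` helper is hypothetical, and only `BranchService.Execute` is taken from the diff.

```go
package example // illustrative only, not part of this commit

import (
	"context"

	"cosmossdk.io/core/appmodule"
)

// runBranched executes fn against branched state: writes made inside fn are
// committed only when it returns nil and are discarded otherwise, which is
// what replaces the old cache-context/flush dance in Exec.
func runBranched(ctx context.Context, env appmodule.Environment, fn func(ctx context.Context) error) error {
	return env.BranchService.Execute(ctx, fn)
}
```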
@@ -2,13 +2,12 @@ package keeper

 import (
 	"bytes"
-	"fmt"
+	"context"

 	errorsmod "cosmossdk.io/errors"
 	"cosmossdk.io/x/group"
 	"cosmossdk.io/x/group/errors"

-	"github.com/cosmos/cosmos-sdk/baseapp"
 	"github.com/cosmos/cosmos-sdk/codec"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
@@ -16,12 +15,13 @@ import (

 // doExecuteMsgs routes the messages to the registered handlers. Messages are limited to those that require no authZ or
 // by the account of group policy only. Otherwise this gives access to other peoples accounts as the sdk middlewares are bypassed
-// TODO: use context.Context and env bundler service once baseapp's MsgServiceHandler is migrated to use context.Context
-func (s Keeper) doExecuteMsgs(ctx sdk.Context, router baseapp.MessageRouter, proposal group.Proposal, groupPolicyAcc sdk.AccAddress, decisionPolicy group.DecisionPolicy) ([]sdk.Result, error) {
+func (k Keeper) doExecuteMsgs(ctx context.Context, proposal group.Proposal, groupPolicyAcc sdk.AccAddress, decisionPolicy group.DecisionPolicy) error {
+	currentTime := k.environment.HeaderService.GetHeaderInfo(ctx).Time
+
 	// Ensure it's not too early to execute the messages.
 	minExecutionDate := proposal.SubmitTime.Add(decisionPolicy.GetMinExecutionPeriod())
-	if ctx.HeaderInfo().Time.Before(minExecutionDate) {
-		return nil, errors.ErrInvalid.Wrapf("must wait until %s to execute proposal %d", minExecutionDate, proposal.Id)
+	if currentTime.Before(minExecutionDate) {
+		return errors.ErrInvalid.Wrapf("must wait until %s to execute proposal %d", minExecutionDate, proposal.Id)
 	}

 	// Ensure it's not too late to execute the messages.
@@ -29,37 +29,26 @@ func (s Keeper) doExecuteMsgs(ctx sdk.Context, router baseapp.MessageRouter, pro
 	// be pruned automatically, so this function should not even be called, as
 	// the proposal doesn't exist in state. For sanity check, we can still keep
 	// this simple and cheap check.
-	expiryDate := proposal.VotingPeriodEnd.Add(s.config.MaxExecutionPeriod)
-	if expiryDate.Before(ctx.HeaderInfo().Time) {
-		return nil, errors.ErrExpired.Wrapf("proposal expired on %s", expiryDate)
+	expiryDate := proposal.VotingPeriodEnd.Add(k.config.MaxExecutionPeriod)
+	if expiryDate.Before(currentTime) {
+		return errors.ErrExpired.Wrapf("proposal expired on %s", expiryDate)
 	}

 	msgs, err := proposal.GetMsgs()
 	if err != nil {
-		return nil, err
+		return err
 	}

-	results := make([]sdk.Result, len(msgs))
-	if err := ensureMsgAuthZ(msgs, groupPolicyAcc, s.cdc); err != nil {
-		return nil, err
+	if err := ensureMsgAuthZ(msgs, groupPolicyAcc, k.cdc); err != nil {
+		return err
 	}

 	for i, msg := range msgs {
-		handler := s.router.Handler(msg)
-		if handler == nil {
-			return nil, errorsmod.Wrapf(errors.ErrInvalid, "no message handler found for %q", sdk.MsgTypeURL(msg))
+		if _, err := k.environment.RouterService.MessageRouterService().InvokeUntyped(ctx, msg); err != nil {
+			return errorsmod.Wrapf(err, "message %s at position %d", sdk.MsgTypeURL(msg), i)
 		}
-		r, err := handler(ctx, msg)
-		if err != nil {
-			return nil, errorsmod.Wrapf(err, "message %s at position %d", sdk.MsgTypeURL(msg), i)
-		}
-		// Handler should always return non-nil sdk.Result.
-		if r == nil {
-			return nil, fmt.Errorf("got nil sdk.Result for message %q at position %d", msg, i)
-		}
-
-		results[i] = *r
 	}
-	return results, nil
+	return nil
 }

 // ensureMsgAuthZ checks that if a message requires signers that all of them
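With the hunk above, proposal messages are dispatched through the environment's message router service instead of a `baseapp.MessageRouter` handler loop. A standalone sketch of that dispatch follows; the `dispatchMsgs` helper is hypothetical, while `InvokeUntyped` and the `RouterService` field come from the diff.

```go
package example // illustrative only, not part of this commit

import (
	"context"
	"fmt"

	"cosmossdk.io/core/appmodule"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// dispatchMsgs routes each message through the environment's message router
// service, mirroring the loop in the new doExecuteMsgs.
func dispatchMsgs(ctx context.Context, env appmodule.Environment, msgs []sdk.Msg) error {
	for i, msg := range msgs {
		if _, err := env.RouterService.MessageRouterService().InvokeUntyped(ctx, msg); err != nil {
			return fmt.Errorf("message %s at position %d: %w", sdk.MsgTypeURL(msg), i, err)
		}
	}
	return nil
}
```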
@@ -8,7 +8,6 @@ import (
 	"cosmossdk.io/x/group"
 	"cosmossdk.io/x/group/keeper"

-	"github.com/cosmos/cosmos-sdk/baseapp"
 	"github.com/cosmos/cosmos-sdk/codec"
 	cdctypes "github.com/cosmos/cosmos-sdk/codec/types"
 )
@@ -28,13 +27,12 @@ func init() {
 type GroupInputs struct {
 	depinject.In

-	Config           *modulev1.Module
-	Environment      appmodule.Environment
-	Cdc              codec.Codec
-	AccountKeeper    group.AccountKeeper
-	BankKeeper       group.BankKeeper
-	Registry         cdctypes.InterfaceRegistry
-	MsgServiceRouter baseapp.MessageRouter
+	Config        *modulev1.Module
+	Environment   appmodule.Environment
+	Cdc           codec.Codec
+	AccountKeeper group.AccountKeeper
+	BankKeeper    group.BankKeeper
+	Registry      cdctypes.InterfaceRegistry
 }

 type GroupOutputs struct {
@@ -47,7 +45,6 @@ type GroupOutputs struct {
 func ProvideModule(in GroupInputs) GroupOutputs {
 	k := keeper.NewKeeper(in.Environment,
 		in.Cdc,
-		in.MsgServiceRouter,
 		in.AccountKeeper,
 		group.Config{
 			MaxExecutionPeriod: in.Config.MaxExecutionPeriod.AsDuration(),